Mirror of https://github.com/projecthorus/sondehub-infra.git (synced 2024-12-21 14:07:52 +00:00)
add gzip and bug fixes
This commit is contained in:
parent d77bfa8a7c
commit a3159854cc
@@ -11,7 +11,7 @@ import http.client
 import math
 import logging
 
-#logging.basicConfig(format='%(asctime)s %(message)s', level=logging.DEBUG)
+logging.basicConfig(format='%(asctime)s %(message)s', level=logging.DEBUG)
 
 HOST = os.getenv("ES")
 
@@ -206,7 +206,7 @@ def predict(event, context):
                     {
                         "range": {
                             "datetime": {
-                                "gte": "now-5m",
+                                "gte": "now-10m",
                                 "lte": "now",
                                 "format": "strict_date_optional_time"
                             }
@@ -222,7 +222,8 @@ def predict(event, context):
                     }
                 ]
             }
-        }
+        },
+        "size": 0
     }
     if "queryStringParameters" in event:
        if "vehicles" in event["queryStringParameters"] and event["queryStringParameters"]["vehicles"] != "RS_*;*chase" and event["queryStringParameters"]["vehicles"] != "":
@@ -233,6 +234,7 @@ def predict(event, context):
                 }
             }
         )
+        payload['query']['bool']['filter'][1]['range']['datetime']['gte'] = 'now-6h' # for single sonde allow longer predictions
     logging.debug("Start ES Request")
     results = es_request(payload, path, "GET")
     logging.debug("Finished ES Request")
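Taken together, the query tweaks above widen the default recent-telemetry window from now-5m to now-10m, add "size": 0 so Elasticsearch returns no raw hit documents (only the aggregated results the Lambda actually uses), and stretch the window to now-6h when a specific sonde is requested. A minimal sketch of the search-body shape implied by these hunks; the first filter clause and the aggregation section are not part of this diff, so a placeholder stands in for them:

# Sketch only: reconstructs the payload shape edited above. The match_all
# entry is a placeholder for the first filter clause, which the diff does
# not show.
payload = {
    "query": {
        "bool": {
            "filter": [
                {"match_all": {}},  # placeholder for the first filter clause
                {
                    "range": {
                        "datetime": {
                            "gte": "now-10m",  # was "now-5m" before this commit
                            "lte": "now",
                            "format": "strict_date_optional_time",
                        }
                    }
                },
            ]
        }
    },
    "size": 0,  # new: suppress raw hits, keep only aggregations
}

# When the request names a single sonde, the code widens the window further:
payload['query']['bool']['filter'][1]['range']['datetime']['gte'] = 'now-6h'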
@@ -277,6 +279,8 @@ def predict(event, context):
         )
         res = conn.getresponse()
         data = res.read()
+        if res.code != 200:
+            logging.debug(data)
         serial_data[serial] = json.loads(data.decode("utf-8"))
     logging.debug("Stop Predict")
     output = []
@@ -285,32 +289,32 @@ def predict(event, context):
 
 
         data = []
-        for stage in value['prediction']:
-            if stage['stage'] == 'ascent' and serials[serial]['rate'] < 0: # ignore ascent stage if we have already burst
-                continue
-            else:
-                for item in stage['trajectory']:
-                    data.append({
-                        "time": int(datetime.fromisoformat(item['datetime'].split(".")[0].replace("Z","")).timestamp()),
-                        "lat": item['latitude'],
-                        "lon": item['longitude'] - 360 if item['longitude'] > 180 else item['longitude'],
-                        "alt": item['altitude'],
-                    })
+        if 'prediction' in value:
+            for stage in value['prediction']:
+                if stage['stage'] == 'ascent' and serials[serial]['rate'] < 0: # ignore ascent stage if we have already burst
+                    continue
+                else:
+                    for item in stage['trajectory']:
+                        data.append({
+                            "time": int(datetime.fromisoformat(item['datetime'].split(".")[0].replace("Z","")).timestamp()),
+                            "lat": item['latitude'],
+                            "lon": item['longitude'] - 360 if item['longitude'] > 180 else item['longitude'],
+                            "alt": item['altitude'],
+                        })
 
-        output.append({
-            "vehicle": serial,
-            "time": value['request']['launch_datetime'],
-            "latitude": value['request']['launch_latitude'],
-            "longitude": value['request']['launch_longitude'],
-            "altitude": value['request']['launch_altitude'],
-            "ascent_rate":value['request']['ascent_rate'],
-            "descent_rate":value['request']['descent_rate'],
-            "burst_altitude": value['request']['burst_altitude'],
-            "descending": 1 if serials[serial]['rate'] < 0 else 0,
-            "landed": 0,
-            "data": json.dumps(data)
-        })
-
+            output.append({
+                "vehicle": serial,
+                "time": value['request']['launch_datetime'],
+                "latitude": value['request']['launch_latitude'],
+                "longitude": value['request']['launch_longitude'],
+                "altitude": value['request']['launch_altitude'],
+                "ascent_rate":value['request']['ascent_rate'],
+                "descent_rate":value['request']['descent_rate'],
+                "burst_altitude": value['request']['burst_altitude'],
+                "descending": 1 if serials[serial]['rate'] < 0 else 0,
+                "landed": 0,
+                "data": json.dumps(data)
+            })
     logging.debug("Finished")
     return json.dumps(output)
 
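Two details of the trajectory loop above (otherwise unchanged apart from the new 'prediction' guard and re-indentation) are easy to miss: ascent-stage points are dropped once the sonde is already descending, and the predictor's 0..360 degree longitudes are wrapped into the -180..180 range, presumably for the map front end. A standalone sketch of that wrap, using a hypothetical helper name that does not exist in the repo:

def wrap_longitude(lon: float) -> float:
    # Hypothetical helper mirroring the inline expression in the loop above:
    # predictor longitudes in 0..360 are shifted into -180..180.
    return lon - 360 if lon > 180 else lon

assert wrap_longitude(350.5) == -9.5
assert wrap_longitude(120.0) == 120.0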
@@ -340,6 +344,7 @@ if __name__ == "__main__":
     # vehicles: RS_*;*chase
     print(predict(
         {"queryStringParameters" : {
+            # "vehicles": "S4610686"
         }},{}
     ))
 
@@ -11,6 +11,10 @@ from datetime import datetime, timedelta, timezone
 import sys, traceback
 import re
+import html
+import base64
+import gzip
+from io import BytesIO
 
 
 HOST = os.getenv("ES")
 # get current sondes, filter by date, location
@@ -480,7 +484,22 @@ def datanew(event, context):
         output["positions"]["position"] = sorted(
             output["positions"]["position"], key=lambda k: k["position_id"]
         )
-    return json.dumps(output)
+    compressed = BytesIO()
+    with gzip.GzipFile(fileobj=compressed, mode='w') as f:
+        json_response = json.dumps(output)
+        f.write(json_response.encode('utf-8'))
+
+    gzippedResponse = compressed.getvalue()
+    return {
+            "body": base64.b64encode(gzippedResponse).decode(),
+            "isBase64Encoded": True,
+            "statusCode": 200,
+            "headers": {
+                "Content-Encoding": "gzip",
+                "content-type": "application/json"
+            }
+
+    }
 
 
 def get_listeners(event, context):
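This hunk is the gzip half of the commit: instead of returning a bare JSON string, datanew now writes the JSON through gzip.GzipFile into a BytesIO buffer and returns an API-Gateway-style proxy response with the compressed body base64-encoded, isBase64Encoded set, and Content-Encoding: gzip advertised. A minimal sketch of the same pattern as a reusable helper (the helper name is hypothetical, not something in the repo), together with a local round-trip check:

import base64
import gzip
import json
from io import BytesIO


def gzip_json_response(payload, status=200):
    # Hypothetical helper mirroring the pattern added in this commit:
    # serialise, gzip into an in-memory buffer, base64-encode for the
    # Lambda proxy integration.
    buf = BytesIO()
    with gzip.GzipFile(fileobj=buf, mode="w") as f:
        f.write(json.dumps(payload).encode("utf-8"))
    return {
        "body": base64.b64encode(buf.getvalue()).decode(),
        "isBase64Encoded": True,
        "statusCode": status,
        "headers": {
            "Content-Encoding": "gzip",
            "content-type": "application/json",
        },
    }


# Local sanity check: reverse the encoding the way a client would.
resp = gzip_json_response({"positions": {"position": []}})
decoded = json.loads(gzip.decompress(base64.b64decode(resp["body"])))
assert decoded == {"positions": {"position": []}}

Clients that honour Content-Encoding: gzip (browsers, Python requests, curl with --compressed) decompress such a body transparently.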
@@ -606,21 +625,21 @@ if __name__ == "__main__":
     # max_positions: 0
     # position_id: 0
     # vehicles: RS_*;*chase
-    # print(
-    #     datanew(
-    #         {
-    #             "queryStringParameters": {
-    #                 "type": "positions",
-    #                 "mode": "12hours",
-    #                 "position_id": "0",
-    #                 "vehicles": ""
-    #             }
-    #         },
-    #         {},
-    #     )
-    # )
     print(
-        get_listeners(
-            {},{}
+        datanew(
+            {
+                "queryStringParameters": {
+                    "type": "positions",
+                    "mode": "12hours",
+                    "position_id": "0",
+                    "vehicles": "T1240847"
+                }
+            },
+            {},
         )
     )
+    # print(
+    #     get_listeners(
+    #         {},{}
+    #     )
+    # )