mirror of https://github.com/projecthorus/sondehub-infra.git
synced 2025-01-02 19:36:47 +00:00

add gzip and bug fixes

This commit is contained in:
parent d77bfa8a7c
commit a3159854cc
@@ -11,7 +11,7 @@ import http.client
 import math
 import logging
 
-#logging.basicConfig(format='%(asctime)s %(message)s', level=logging.DEBUG)
+logging.basicConfig(format='%(asctime)s %(message)s', level=logging.DEBUG)
 
 HOST = os.getenv("ES")
 
@@ -206,7 +206,7 @@ def predict(event, context):
                     {
                         "range": {
                             "datetime": {
-                                "gte": "now-5m",
+                                "gte": "now-10m",
                                 "lte": "now",
                                 "format": "strict_date_optional_time"
                             }
@@ -222,7 +222,8 @@ def predict(event, context):
                     }
                 ]
             }
-        }
+        },
+        "size": 0
     }
     if "queryStringParameters" in event:
         if "vehicles" in event["queryStringParameters"] and event["queryStringParameters"]["vehicles"] != "RS_*;*chase" and event["queryStringParameters"]["vehicles"] != "":
@@ -233,6 +234,7 @@ def predict(event, context):
                     }
                 }
             )
+            payload['query']['bool']['filter'][1]['range']['datetime']['gte'] = 'now-6h' # for single sonde allow longer predictions
     logging.debug("Start ES Request")
     results = es_request(payload, path, "GET")
     logging.debug("Finished ES Request")
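Taken together, the query changes above widen the default search window from the last 5 minutes to the last 10 minutes, add "size": 0 so Elasticsearch returns only aggregations rather than raw hits, and stretch the window to 6 hours when a single sonde is requested. A rough sketch of the relevant part of the payload after this commit (the aggregation section and the other filter clauses are omitted; the exact shape is an assumption, but indexing filter[1] implies at least one clause precedes the range filter):

    payload = {
        "query": {
            "bool": {
                "filter": [
                    # ... at least one other filter clause here ...
                    {
                        "range": {
                            "datetime": {
                                "gte": "now-10m",  # widened from now-5m
                                "lte": "now",
                                "format": "strict_date_optional_time"
                            }
                        }
                    }
                ]
            }
        },
        "size": 0  # aggregations only, no raw documents
    }

    # For a single named sonde, the window is stretched further:
    payload['query']['bool']['filter'][1]['range']['datetime']['gte'] = 'now-6h'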
@@ -277,6 +279,8 @@ def predict(event, context):
         )
         res = conn.getresponse()
         data = res.read()
+        if res.code != 200:
+            logging.debug(data)
         serial_data[serial] = json.loads(data.decode("utf-8"))
     logging.debug("Stop Predict")
     output = []
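The added check logs the raw body whenever the outbound prediction request fails, before the unconditional json.loads() below it runs. One hedge on the attribute name: for a plain http.client connection the documented status attribute is res.status (res.getcode() also works), so an equivalent defensive sketch would be:

    if res.status != 200:
        # surface the raw error body from the predictor in the debug log
        logging.debug(data)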
@@ -285,7 +289,7 @@ def predict(event, context):
 
 
         data = []
-
+        if 'prediction' in value:
             for stage in value['prediction']:
                 if stage['stage'] == 'ascent' and serials[serial]['rate'] < 0: # ignore ascent stage if we have already burst
                     continue
@@ -340,6 +344,7 @@ if __name__ == "__main__":
     # vehicles: RS_*;*chase
     print(predict(
         {"queryStringParameters" : {
+            # "vehicles": "S4610686"
         }},{}
     ))
 
@@ -11,6 +11,10 @@ from datetime import datetime, timedelta, timezone
 import sys, traceback
 import re
 import html
+import base64
+import gzip
+from io import BytesIO
+
 
 HOST = os.getenv("ES")
 # get current sondes, filter by date, location
@@ -480,7 +484,22 @@ def datanew(event, context):
     output["positions"]["position"] = sorted(
         output["positions"]["position"], key=lambda k: k["position_id"]
     )
-    return json.dumps(output)
+    compressed = BytesIO()
+    with gzip.GzipFile(fileobj=compressed, mode='w') as f:
+        json_response = json.dumps(output)
+        f.write(json_response.encode('utf-8'))
+
+    gzippedResponse = compressed.getvalue()
+    return {
+        "body": base64.b64encode(gzippedResponse).decode(),
+        "isBase64Encoded": True,
+        "statusCode": 200,
+        "headers": {
+            "Content-Encoding": "gzip",
+            "content-type": "application/json"
+        }
+
+    }
 
 
 def get_listeners(event, context):
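Because datanew() now returns a Lambda proxy dictionary whose body is gzipped and base64-encoded, anything calling the handler directly (such as the test harness below) has to reverse both encodings to see the JSON. A minimal sketch, not part of the commit:

    import base64, gzip, json

    def decode_datanew(resp):
        raw = base64.b64decode(resp["body"])        # undo isBase64Encoded
        return json.loads(gzip.decompress(raw))     # undo Content-Encoding: gzip

Clients going through API Gateway normally never notice the change: with Content-Encoding: gzip set in the headers, browsers and standard HTTP libraries decompress the body transparently.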
@@ -606,21 +625,21 @@ if __name__ == "__main__":
     # max_positions: 0
     # position_id: 0
     # vehicles: RS_*;*chase
-    # print(
-    #     datanew(
-    #         {
-    #             "queryStringParameters": {
-    #                 "type": "positions",
-    #                 "mode": "12hours",
-    #                 "position_id": "0",
-    #                 "vehicles": ""
-    #             }
-    #         },
-    #         {},
-    #     )
-    # )
     print(
-        get_listeners(
-            {},{}
+        datanew(
+            {
+                "queryStringParameters": {
+                    "type": "positions",
+                    "mode": "12hours",
+                    "position_id": "0",
+                    "vehicles": "T1240847"
+                }
+            },
+            {},
         )
     )
+    # print(
+    #     get_listeners(
+    #         {},{}
+    #     )
+    # )
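Note that with this change the local run prints the proxy dictionary with the base64 gzip body rather than a plain JSON string; the decode sketch above can be applied to inspect the actual positions output when testing the handler locally.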