sondehub-infra/lambda/query/__init__.py

import json
from datetime import datetime, timedelta, timezone
import base64
import gzip
from io import BytesIO
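# local helper module for sending requests to the Elasticsearch cluster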
import es
def get_sondes(event, context):
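    """
    Return the most recent telemetry frame for every sonde heard in the
    requested window (default: the last day), keyed by serial. Optionally
    filters to a geo-distance circle when lat/lon/distance are supplied.
    """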
path = "telm-*/_search"
payload = {
"size": 0,
"aggs": {
"2": {
"terms": {
"field": "serial.keyword",
"order": {"_key": "desc"},
"size": 10000,
},
"aggs": {
"1": {
"top_hits": {
"size": 1,
"sort": [{"datetime": {"order": "desc"}}],
}
}
},
}
},
"query": {"bool": {"filter": [{"match_all": {}}]}},
}
# add filters
if "queryStringParameters" in event:
if "last" in event["queryStringParameters"]:
payload["query"]["bool"]["filter"].append(
{
"range": {
"datetime": {
"gte": f"now-{abs(int(event['queryStringParameters']['last']))}s",
"lte": "now+1m",
}
}
}
)
else:
payload["query"]["bool"]["filter"].append(
{"range": {"datetime": {"gte": "now-1d", "lte": "now+1m"}}}
)
if (
"lat" in event["queryStringParameters"]
and "lon" in event["queryStringParameters"]
and "distance" in event["queryStringParameters"]
):
payload["query"]["bool"]["filter"].append(
{
"geo_distance": {
"distance": f"{int(event['queryStringParameters']['distance'])}m",
"position": {
"lat": float(event["queryStringParameters"]["lat"]),
"lon": float(event["queryStringParameters"]["lon"]),
},
}
}
)
else:
payload["query"]["bool"]["filter"].append(
{"range": {"datetime": {"gte": "now-1d", "lte": "now+1m"}}}
)
try:
results = es.request(json.dumps(payload), path, "POST")
    except Exception:
print(json.dumps(event))
raise
    buckets = results["aggregations"]["2"]["buckets"]
    sondes = {
        bucket["1"]["hits"]["hits"][0]["_source"]["serial"]: bucket["1"]["hits"]["hits"][0]["_source"]
        for bucket in buckets
    }
    return json.dumps(sondes)


def get_telem(event, context):
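    """
    Return historic telemetry bucketed per serial and per time interval.
    `duration` picks a predefined (window, bucket size) pair, `datetime`
    sets the end of the window, and `serial` restricts the query to a
    single sonde at 1 s resolution.
    """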
    durations = {  # ideally we wouldn't need to predefine these, but this is a lot of data and we don't want to overload ES
"3d": (259200, 1200),
"1d": (86400, 600),
"12h": (43200, 600),
"6h": (21600, 240),
"3h": (10800, 120),
"1h": (3600, 60),
"30m": (1800, 30),
"1m": (60, 5),
"15s": (15, 1),
"0": (0, 1) # for getting a single time point
}
duration_query = "3h"
requested_time = datetime.now(timezone.utc)
if (
"queryStringParameters" in event
and "duration" in event["queryStringParameters"]
):
if event["queryStringParameters"]["duration"] in durations:
duration_query = event["queryStringParameters"]["duration"]
else:
            return f"Duration must be one of: {', '.join(durations.keys())}"
if (
"queryStringParameters" in event
and "datetime" in event["queryStringParameters"]
):
requested_time = datetime.fromisoformat(
event["queryStringParameters"]["datetime"].replace("Z", "+00:00")
)
(duration, interval) = durations[duration_query]
    if "queryStringParameters" in event and "serial" in event["queryStringParameters"]:
interval = 1
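    # The query window is (gte, lt]: the one-second bump on the upper bound
    # keeps requested_time itself inside the (exclusive) "lt" range below.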
    lt = requested_time + timedelta(seconds=1)
    gte = requested_time - timedelta(seconds=duration)
    path = "telm-*/_search"
payload = {
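        # Bucket by serial ("2"), then into fixed time intervals ("3"),
        # keeping the newest hit(s) in each bucket ("1").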
"timeout": "30s",
"size": 0,
"aggs": {
"2": {
"terms": {
"field": "serial.keyword",
"size": 10000,
},
"aggs": {
"3": {
"date_histogram": {
"field": "datetime",
"fixed_interval": f"{str(interval)}s",
"min_doc_count": 1,
},
"aggs": {
"1": {
"top_hits": {
"size": 10 if (duration == 0 ) else 1,
                            "sort": [
                                {"datetime": {"order": "desc"}},
                                {"pressure": {"order": "desc", "mode": "median"}},
                            ],
}
}
},
}
},
}
},
"query": {
"bool": {
"filter": [
{"match_all": {}},
{
"range": {
"datetime": {"gte": gte.isoformat(), "lt": lt.isoformat()}
}
},
]
}
},
}
if "queryStringParameters" in event:
if "serial" in event["queryStringParameters"]:
payload["query"]["bool"]["filter"].append(
{
"match_phrase": {
"serial": str(event["queryStringParameters"]["serial"])
}
}
)
results = es.request(json.dumps(payload), path, "POST")
    output = {
        sonde["key"]: {
            data["key_as_string"]: dict(
                data["1"]["hits"]["hits"][0]["_source"],
                uploaders=[  # add additional uploader information
                    {
                        key: value
                        for key, value in uploader["_source"].items()
                        if key in ["snr", "rssi", "uploader_callsign", "frequency"]
                    }
                    for uploader in data["1"]["hits"]["hits"]
                ],
            )
            for data in sonde["3"]["buckets"]
        }
        for sonde in results["aggregations"]["2"]["buckets"]
    }
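    # gzip the JSON body; API Gateway proxy responses must base64-encode
    # binary (compressed) payloads and flag them with isBase64Encoded.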
compressed = BytesIO()
with gzip.GzipFile(fileobj=compressed, mode='w') as f:
json_response = json.dumps(output)
f.write(json_response.encode('utf-8'))
gzippedResponse = compressed.getvalue()
return {
"body": base64.b64encode(gzippedResponse).decode(),
"isBase64Encoded": True,
"statusCode": 200,
"headers": {
"Content-Encoding": "gzip",
"content-type": "application/json"
}
    }


def get_listener_telemetry(event, context):
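    """
    Return historic listener positions bucketed per uploader callsign and
    per time interval. Only documents with an uploader_position are
    included; `uploader_callsign` restricts the query to one station at
    1 s resolution.
    """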
    durations = {  # ideally we wouldn't need to predefine these, but this is a lot of data and we don't want to overload ES
        "3d": (259200, 2400),  # 3d window, 40m buckets
        "1d": (86400, 2400),  # 1d window, 40m buckets
        "12h": (43200, 1200),  # 12h window, 20m buckets
        "6h": (21600, 300),  # 6h window, 5m buckets
        "3h": (10800, 120),  # 3h window, 2m buckets
        "1h": (3600, 120),  # 1h window, 2m buckets
        "30m": (1800, 30),  # 30m window, 30s buckets
        "1m": (60, 1),  # 1m window, 1s buckets
        "15s": (15, 1),  # 15s window, 1s buckets
        "0": (0, 1),  # single time point
}
duration_query = "3h"
requested_time = datetime.now(timezone.utc)
if (
"queryStringParameters" in event
and "duration" in event["queryStringParameters"]
):
if event["queryStringParameters"]["duration"] in durations:
duration_query = event["queryStringParameters"]["duration"]
else:
            return f"Duration must be one of: {', '.join(durations.keys())}"
if (
"queryStringParameters" in event
and "datetime" in event["queryStringParameters"]
):
requested_time = datetime.fromisoformat(
event["queryStringParameters"]["datetime"].replace("Z", "+00:00")
)
(duration, interval) = durations[duration_query]
if "queryStringParameters" in event and "uploader_callsign" in event["queryStringParameters"]:
interval = 1
lt = requested_time
    gte = requested_time - timedelta(seconds=duration)
path = "listeners-*/_search"
payload = {
"size": 0,
"timeout": "30s",
"aggs": {
"2": {
"terms": {
"field": "uploader_callsign.keyword",
"order": {"_key": "desc"},
"size": 10000,
},
"aggs": {
"3": {
"date_histogram": {
"field": "ts",
"fixed_interval": f"{str(interval)}s",
"min_doc_count": 1,
},
"aggs": {
"1": {
"top_hits": {
# "docvalue_fields": [
# {"field": "position"},
# {"field": "alt"},
# {"field": "datetime"},
# ],
# "_source": "position",
"size": 1,
"sort": [{"ts": {"order": "desc"}}],
}
}
},
}
},
}
},
"query": {
"bool": {
"filter": [
{"match_all": {}},
                    {"exists": {"field": "uploader_position"}},
{
"range": {
"ts": {"gte": gte.isoformat(), "lt": lt.isoformat()}
}
},
]
}
},
}
if "queryStringParameters" in event:
if "uploader_callsign" in event["queryStringParameters"]:
payload["query"]["bool"]["filter"].append(
{
"match_phrase": {
"uploader_callsign": str(event["queryStringParameters"]["uploader_callsign"])
}
}
)
results = es.request(json.dumps(payload), path, "POST")
output = {
sonde["key"]: {
data["key_as_string"]: data["1"]["hits"]["hits"][0]["_source"]
for data in sonde["3"]["buckets"]
}
for sonde in results["aggregations"]["2"]["buckets"]
}
compressed = BytesIO()
with gzip.GzipFile(fileobj=compressed, mode='w') as f:
json_response = json.dumps(output)
f.write(json_response.encode('utf-8'))
gzippedResponse = compressed.getvalue()
return {
"body": base64.b64encode(gzippedResponse).decode(),
"isBase64Encoded": True,
"statusCode": 200,
"headers": {
"Content-Encoding": "gzip",
"content-type": "application/json"
}
    }


def get_sites(event, context):
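    """
    Return records from the sites index keyed by station, optionally
    filtered to a single `station`.
    """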
path = "sites/_search"
payload = {
"version": True,
"size": 10000,
"_source": {
"excludes": []
},
"query": {
"bool": {
"filter": [
{
"match_all": {}
}
]
}
}
}
if "queryStringParameters" in event:
if "station" in event["queryStringParameters"]:
payload["query"]["bool"]["filter"].append(
{
"match_phrase": {
"station": str(event["queryStringParameters"]["station"])
}
}
)
results = es.request(json.dumps(payload), path, "POST")
output = {x['_source']['station']: x['_source'] for x in results['hits']['hits']}
compressed = BytesIO()
with gzip.GzipFile(fileobj=compressed, mode='w') as f:
json_response = json.dumps(output)
f.write(json_response.encode('utf-8'))
gzippedResponse = compressed.getvalue()
return {
"body": base64.b64encode(gzippedResponse).decode(),
"isBase64Encoded": True,
"statusCode": 200,
"headers": {
"Content-Encoding": "gzip",
"content-type": "application/json"
}
    }


def telm_stats(event, context):
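    """
    Return 7-day upload statistics: telemetry counts and unique uploader
    callsigns, broken down by software name and version, plus totals.
    """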
path = "telm-*/_search"
payload = {
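        # Overall unique uploader callsigns, plus unique-callsign breakdowns
        # per software name and per software version (top 10 of each).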
"aggs": {
"total_unique_callsigns": {
"cardinality": {
"field": "uploader_callsign.keyword"
}
},
"software_name": {
"terms": {
"field": "software_name.keyword",
"order": {
"unique_callsigns": "desc"
},
"size": 10
},
"aggs": {
"unique_callsigns": {
"cardinality": {
"field": "uploader_callsign.keyword"
}
},
"software_version": {
"terms": {
"field": "software_version.keyword",
"order": {
"unique_callsigns": "desc"
},
"size": 10
},
"aggs": {
"unique_callsigns": {
"cardinality": {
"field": "uploader_callsign.keyword"
}
}
}
}
}
}
},
"size": 0,
"track_total_hits": True,
"query": {
"bool": {
"must": [],
"filter": [
{
"range": {
"datetime": {
"gte": "now-7d",
"lte": "now",
"format": "strict_date_optional_time"
}
}
}
]
}
}
}
results = es.request(json.dumps(payload), path, "POST")
output = {
x['key']: {
"telemetry_count": x["doc_count"],
"unique_callsigns": x["unique_callsigns"]["value"],
"versions": {
y["key"]: {
"telemetry_count": y["doc_count"],
"unique_callsigns": y["unique_callsigns"]["value"]
}
for y in x['software_version']['buckets']
}
}
for x in results['aggregations']['software_name']['buckets']
}
output['totals'] = {
"unique_callsigns": results['aggregations']['total_unique_callsigns']['value'],
"telemetry_count": results['hits']['total']['value']
}
compressed = BytesIO()
with gzip.GzipFile(fileobj=compressed, mode='w') as f:
json_response = json.dumps(output)
f.write(json_response.encode('utf-8'))
gzippedResponse = compressed.getvalue()
return {
"body": base64.b64encode(gzippedResponse).decode(),
"isBase64Encoded": True,
"statusCode": 200,
"headers": {
"Content-Encoding": "gzip",
"content-type": "application/json"
}
}
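

# A minimal local smoke test (a sketch only, not part of the deployed Lambda).
# It assumes the `es` helper can reach the cluster from your environment and
# mimics an API Gateway proxy event; "last" is a lookback window in seconds.
if __name__ == "__main__":
    print(get_sondes({"queryStringParameters": {"last": "3600"}}, None))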