"""Lambda handlers for logging and querying radiosonde recovery reports,
backed by Elasticsearch and the sondehub-history S3 bucket."""

import json
import zlib
import base64
from datetime import datetime, timedelta
from email.utils import parsedate  # used to parse the HTTP "date" header in put()
import es
import boto3
import botocore.exceptions


def sondeExists(serial):
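    """Check whether telemetry exists for this serial, either in the telm-*
    Elasticsearch indices or as an archived file in the sondehub-history
    S3 bucket."""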
    query = {
        "aggs": {
            "1": {
                "top_hits": {
                    "docvalue_fields": [
                        {"field": "position"},
                        {"field": "alt"},
                    ],
                    "_source": "position",
                    "size": 1,
                    "sort": [{"datetime": {"order": "desc"}}],
                }
            }
        },
        "query": {
            "bool": {
                "filter": [
                    {"match_phrase": {"serial.keyword": serial}}
                ]
            }
        },
    }
    results = es.request(json.dumps(query), "telm-*/_search", "POST")
    if len(results["aggregations"]["1"]["hits"]["hits"]) > 0:
        return True

    # if there's a historic file created for this sonde, use that instead
    try:
        s3 = boto3.resource("s3")
        s3_object = s3.Object("sondehub-history", f"serial/{serial}.json.gz")
        # load() issues a HEAD request and raises ClientError if the object is missing
        s3_object.load()
        return True
    except botocore.exceptions.ClientError as e:
        if e.response["Error"]["Code"] == "404":
            return False
        else:
            # Something else has gone wrong.
            raise


def getRecovered(serial):
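    """Return the most recent recovery report logged for this serial with
    recovered == true, as a list of Elasticsearch hits."""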
    query = {
        "aggs": {
            "1": {
                "top_hits": {
                    "docvalue_fields": [
                        {"field": "recovered_by.keyword"},
                    ],
                    "size": 1,
                    "sort": [{"datetime": {"order": "desc"}}],
                }
            }
        },
        "query": {
            "bool": {
                "filter": [
                    {"match_phrase": {"serial.keyword": serial}},
                    {"match_phrase": {"recovered": True}},
                ]
            }
        },
    }
    results = es.request(json.dumps(query), "recovered*/_search", "POST")
    return results["aggregations"]["1"]["hits"]["hits"]


def put(event, context):
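    """Lambda handler: log a sonde recovery report.

    The event body is a JSON recovery object, optionally base64-encoded
    and/or gzipped. Rejects empty serials and serials with no known
    telemetry, then indexes the report into the recovered index."""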
    if "isBase64Encoded" in event and event["isBase64Encoded"] == True:
        event["body"] = base64.b64decode(event["body"])
    if (
        "content-encoding" in event["headers"]
        and event["headers"]["content-encoding"] == "gzip"
    ):
        # 16 + MAX_WBITS tells zlib to expect a gzip header
        event["body"] = zlib.decompress(event["body"], 16 + zlib.MAX_WBITS)
    time_delta = None
    if "date" in event["headers"]:
        try:
            time_delta_header = event["headers"]["date"]
            # parsedate() yields a 9-tuple; only the first six fields
            # (year..second) are positional datetime() arguments
            time_delta = (
                datetime(*parsedate(time_delta_header)[:6]) - datetime.utcnow()
            ).total_seconds()
        except Exception:
            pass

    recovered = json.loads(event["body"])

    if "datetime" not in recovered:
        recovered["datetime"] = datetime.now().isoformat()

    if recovered["serial"] == "":
        return {"statusCode": 400, "body": json.dumps({"message": "serial cannot be empty"})}

    if not sondeExists(recovered["serial"]):
        return {"statusCode": 400, "body": json.dumps({"message": "serial not found in db"})}

    # add in the extra position field Elasticsearch uses for geo queries
    recovered["position"] = [float(recovered["lon"]), float(recovered["lat"])]

    es.request(json.dumps(recovered), "recovered/_doc", "POST")

    return {"statusCode": 200, "body": json.dumps({"message": "Recovery logged. Have a good day ^_^"})}


def get(event, context):
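    """Lambda handler: return the latest recovery report per serial.

    Query parameters: last (window in seconds, default 3 days), serial
    (comma-separated list), and lat/lon/distance for a geo filter."""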
    filters = []
    should = []
    last = 259200  # default window: the last 3 days, in seconds
    serials = None
    lat = None
    lon = None
    distance = None

    # grab query parameters
    if "queryStringParameters" in event:
        if "last" in event["queryStringParameters"]:
            last = int(event["queryStringParameters"]["last"])
        if "serial" in event["queryStringParameters"]:
            serials = event["queryStringParameters"]["serial"].split(",")
        if "last" not in event["queryStringParameters"] and "serial" in event["queryStringParameters"]:
            # when filtering by serial alone, don't constrain the time range
            last = 0
        if "lat" in event["queryStringParameters"]:
            lat = float(event["queryStringParameters"]["lat"])
        if "lon" in event["queryStringParameters"]:
            lon = float(event["queryStringParameters"]["lon"])
        if "distance" in event["queryStringParameters"]:
            distance = int(event["queryStringParameters"]["distance"])

    if last != 0:
        filters.append(
            {
                "range": {
                    "datetime": {
                        "gte": f"now-{last}s",
                        "lte": "now",
                    }
                }
            }
        )
    if serials:
        for serial in serials:
            should.append(
                {
                    "match_phrase": {
                        "serial.keyword": serial
                    }
                }
            )
    # check against None so a lat/lon of exactly 0 still counts as provided
    if lat is not None and lon is not None and distance is not None:
        filters.append(
            {
                "geo_distance": {
                    "distance": f"{distance}m",
                    "position": {
                        "lat": lat,
                        "lon": lon,
                    },
                }
            }
        )

    query = {
        "query": {
            "bool": {
                "filter": filters,
                "should": should,
            }
        },
        "aggs": {
            "2": {
                "terms": {
                    "field": "serial.keyword",
                    "order": {"2-orderAgg": "desc"},
                    "size": 500,
                },
                "aggs": {
                    "2-orderAgg": {
                        "max": {"field": "datetime"}
                    },
                    "1": {
                        "top_hits": {
                            "_source": True,
                            "size": 1,
                            "sort": [
                                {"recovered": {"order": "desc"}},
                                {"datetime": {"order": "desc"}},
                            ],
                        }
                    },
                },
            }
        },
    }
    if serials:
        query["query"]["bool"]["minimum_should_match"] = 1
    results = es.request(json.dumps(query), "recovered*/_search", "POST")
    output = [
        x["1"]["hits"]["hits"][0]["_source"]
        for x in results["aggregations"]["2"]["buckets"]
    ]
    return {"statusCode": 200, "body": json.dumps(output)}


def stats(event, context):
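    """Lambda handler: return aggregate recovery statistics (totals,
    recovered/failed breakdown and top chasers), optionally filtered by
    duration, datetime and lat/lon/distance."""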
    filters = []
    should = []
    duration = 0
    serials = None
    lat = None
    lon = None
    distance = None
    requested_time = None

    # grab query parameters
    if "queryStringParameters" in event:
        if "duration" in event["queryStringParameters"]:
            duration = int(event["queryStringParameters"]["duration"])
        if "lat" in event["queryStringParameters"]:
            lat = float(event["queryStringParameters"]["lat"])
        if "lon" in event["queryStringParameters"]:
            lon = float(event["queryStringParameters"]["lon"])
        if "distance" in event["queryStringParameters"]:
            distance = int(event["queryStringParameters"]["distance"])
        if "datetime" in event["queryStringParameters"]:
            requested_time = datetime.fromisoformat(
                event["queryStringParameters"]["datetime"].replace("Z", "+00:00")
            )

    if duration != 0:
        # anchor the window at the requested time if one was supplied, else at now
        end_time = requested_time if requested_time else datetime.now()
        lt = end_time + timedelta(seconds=1)
        gte = end_time - timedelta(seconds=duration)
        filters.append(
            {
                "range": {
                    "datetime": {"gte": gte.isoformat(), "lt": lt.isoformat()}
                }
            }
        )
    # check against None so a lat/lon of exactly 0 still counts as provided
    if lat is not None and lon is not None and distance is not None:
        filters.append(
            {
                "geo_distance": {
                    "distance": f"{distance}m",
                    "position": {
                        "lat": lat,
                        "lon": lon,
                    },
                }
            }
        )

    query = {
        "query": {
            "bool": {
                "filter": filters,
                "should": should,
            }
        },
        "aggs": {
            "chaser_count": {
                "cardinality": {"field": "recovered_by.keyword"}
            },
            "breakdown": {
                "terms": {
                    "field": "recovered",
                    "order": {"counts": "desc"},
                    "size": 5,
                },
                "aggs": {
                    "counts": {
                        "cardinality": {"field": "serial.keyword"}
                    }
                },
            },
            "top_recovered": {
                "terms": {
                    "field": "recovered_by.keyword",
                    "order": {"recovered_by": "desc"},
                    "size": 5,
                },
                "aggs": {
                    "recovered_by": {
                        "cardinality": {"field": "serial.keyword"}
                    }
                },
            },
            "total_count": {
                "cardinality": {"field": "serial.keyword"}
            },
        },
    }
    results = es.request(json.dumps(query), "recovered*/_search", "POST")

    output = {
        "total": 0,
        "recovered": 0,
        "failed": 0,
        "chaser_count": 0,
        "top_chasers": {},
    }
    try:
        output["total"] = results["aggregations"]["total_count"]["value"]
    except Exception:
        output["total"] = 0

    # per-outcome serial counts, keyed "true"/"false" by the terms aggregation
    breakdown = {
        x["key_as_string"]: x["counts"]["value"]
        for x in results["aggregations"]["breakdown"]["buckets"]
    }
    try:
        output["recovered"] = breakdown["true"]
    except KeyError:
        pass

    try:
        output["failed"] = breakdown["false"]
    except KeyError:
        pass

    try:
        output["chaser_count"] = results["aggregations"]["chaser_count"]["value"]
    except Exception:
        output["chaser_count"] = 0

    try:
        output["top_chasers"] = {
            x["key"]: x["recovered_by"]["value"]
            for x in results["aggregations"]["top_recovered"]["buckets"]
        }
    except Exception:
        pass

    return {"statusCode": 200, "body": json.dumps(output)}
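

# A minimal local smoke test; this is a sketch and an assumption, not part of
# the deployed Lambda configuration. The handlers take API Gateway-style
# events, so a fake event with queryStringParameters is enough to exercise
# them end to end (it still needs reachable Elasticsearch/S3 backends).
if __name__ == "__main__":
    print(get({"queryStringParameters": {"last": "86400"}}, None))
    print(stats({"queryStringParameters": {"duration": "86400"}}, None))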