sondehub-infra/predict/lambda_function.py

import boto3
import botocore.credentials
from botocore.awsrequest import AWSRequest
from botocore.endpoint import URLLib3Session
from botocore.auth import SigV4Auth
import json
import os
from datetime import datetime, timedelta, timezone
import sys, traceback
import http.client
import math
import logging
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.DEBUG)
HOST = os.getenv("ES")
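
# This Lambda pulls the last few minutes of radiosonde telemetry out of
# Elasticsearch, estimates each sonde's current ascent/descent rate, and asks
# the Tawhiri API for a flight prediction for every serial it finds.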
def getDensity(altitude):
    """
    Calculate the atmospheric density for a given altitude in metres.
    This is a direct port of the oziplotter Atmosphere class.
    """
    # Constants
    airMolWeight = 28.9644  # Molecular weight of air [g/mol]
    densitySL = 1.225  # Density at sea level [kg/m3]
    pressureSL = 101325  # Pressure at sea level [Pa]
    temperatureSL = 288.15  # Temperature at sea level [K]
    gamma = 1.4
    gravity = 9.80665  # Acceleration of gravity [m/s2]
    tempGrad = -0.0065  # Temperature gradient [K/m]
    RGas = 8.31432  # Universal gas constant [J/(mol*K)]
    R = 287.053  # Specific gas constant for air [J/(kg*K)]
    deltaTemperature = 0.0

    # Lookup tables (standard atmosphere layers)
    altitudes = [0, 11000, 20000, 32000, 47000, 51000, 71000, 84852]
    pressureRels = [
        1,
        2.23361105092158e-1,
        5.403295010784876e-2,
        8.566678359291667e-3,
        1.0945601337771144e-3,
        6.606353132858367e-4,
        3.904683373343926e-5,
        3.6850095235747942e-6,
    ]
    temperatures = [288.15, 216.65, 216.65, 228.65, 270.65, 270.65, 214.65, 186.946]
    tempGrads = [-6.5, 0, 1, 2.8, 0, -2.8, -2, 0]
    gMR = gravity * airMolWeight / RGas

    # Pick a region to work in
    i = 0
    if altitude > 0:
        while altitude > altitudes[i + 1]:
            i = i + 1

    # Lookup based on region
    baseTemp = temperatures[i]
    tempGrad = tempGrads[i] / 1000.0
    pressureRelBase = pressureRels[i]
    deltaAltitude = altitude - altitudes[i]
    temperature = baseTemp + tempGrad * deltaAltitude

    # Calculate relative pressure
    if math.fabs(tempGrad) < 1e-10:
        pressureRel = pressureRelBase * math.exp(
            -1 * gMR * deltaAltitude / 1000.0 / baseTemp
        )
    else:
        pressureRel = pressureRelBase * math.pow(
            baseTemp / temperature, gMR / tempGrad / 1000.0
        )

    # Add temperature offset
    temperature = temperature + deltaTemperature

    # Finally, work out the density...
    speedOfSound = math.sqrt(gamma * R * temperature)  # unused, kept from the original port
    pressure = pressureRel * pressureSL  # unused, kept from the original port
    density = densitySL * pressureRel * temperatureSL / temperature
    return density


def seaLevelDescentRate(descent_rate, altitude):
    """ Calculate the descent rate at sea level, for a given descent rate at altitude """
    rho = getDensity(altitude)
    return math.sqrt((rho / 1.225) * math.pow(descent_rate, 2))
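
# Rough sanity check: getDensity(12000) is about 0.31 kg/m3 (roughly a quarter
# of the sea-level density), so a sonde descending at 6 m/s at 12 km works out
# to roughly 3 m/s at sea level.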
def predict(event, context):
    path = "telm-*/_search"
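    # Aggregation query: group recent telemetry by serial, then into 5 s
    # date-histogram buckets. top_hits "1" and "5" return the latest altitude
    # and position; "4" is a serial_diff (lag 5) of the average altitude, i.e.
    # the altitude change over the last 25 seconds.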
    payload = {
        "aggs": {
            "2": {
                "terms": {
                    "field": "serial.keyword",
                    "order": {"_key": "desc"},
                    "size": 1000
                },
                "aggs": {
                    "3": {
                        "date_histogram": {
                            "field": "datetime",
                            "fixed_interval": "5s"
                        },
                        "aggs": {
                            "1": {
                                "top_hits": {
                                    "docvalue_fields": [{"field": "alt"}],
                                    "_source": "alt",
                                    "size": 1,
                                    "sort": [{"datetime": {"order": "desc"}}]
                                }
                            },
                            "4": {
                                "serial_diff": {
                                    "buckets_path": "4-metric",
                                    "gap_policy": "skip",
                                    "lag": 5
                                }
                            },
                            "5": {
                                "top_hits": {
                                    "docvalue_fields": [{"field": "position"}],
                                    "_source": "position",
                                    "size": 1,
                                    "sort": [{"datetime": {"order": "desc"}}]
                                }
                            },
                            "4-metric": {
                                "avg": {"field": "alt"}
                            }
                        }
                    }
                }
            }
        },
        "size": 0,
        "stored_fields": ["*"],
        "script_fields": {},
        "docvalue_fields": [
            {"field": "@timestamp", "format": "date_time"},
            {"field": "datetime", "format": "date_time"},
            {"field": "log_date", "format": "date_time"},
            {"field": "time_received", "format": "date_time"},
            {"field": "time_server", "format": "date_time"},
            {"field": "time_uploaded", "format": "date_time"}
        ],
        "_source": {"excludes": []},
        "query": {
            "bool": {
                "must": [],
                "filter": [
                    {"match_all": {}},
                    {
                        "range": {
                            "datetime": {
                                "gte": "now-10m",
                                "lte": "now",
                                "format": "strict_date_optional_time"
                            }
                        }
                    }
                ],
                "should": [],
                "must_not": [
                    {"match_phrase": {"software_name": "SondehubV1"}}
                ]
            }
        }
    }
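    # A "vehicles" query string parameter narrows the search to a single serial
    # and widens the time window so a longer prediction can be generated.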
if "queryStringParameters" in event:
2021-03-13 11:19:52 +00:00
if "vehicles" in event["queryStringParameters"] and event["queryStringParameters"]["vehicles"] != "RS_*;*chase" and event["queryStringParameters"]["vehicles"] != "":
2021-02-22 06:13:30 +00:00
payload["query"]["bool"]["filter"].append(
{
"match_phrase": {
"serial": str(event["queryStringParameters"]["vehicles"])
}
}
)
2021-07-18 22:38:31 +00:00
payload['query']['bool']['filter'][1]['range']['datetime']['gte'] = 'now-6h' # for single sonde allow longer predictions
    logging.debug("Start ES Request")
    results = es_request(payload, path, "GET")
    logging.debug("Finished ES Request")
    serials = {}
    for x in results['aggregations']['2']['buckets']:
        try:
            latest = sorted(x['3']['buckets'], key=lambda k: k['key_as_string'])[-1]
            serials[x['key']] = {
                "alt": latest['1']['hits']['hits'][0]['fields']['alt'][0],
                "position": latest['5']['hits']['hits'][0]['fields']['position'][0].split(","),
                "rate": latest['4']['value'] / 25,  # 5 s buckets with a lag of 5, so the diff spans 25 seconds
                "time": latest['key_as_string']
            }
        except:  # skip serials that are missing any of the fields above
            pass
    conn = http.client.HTTPSConnection("tawhiri.v2.sondehub.org")
    serial_data = {}
    logging.debug("Start Predict")
    for serial in serials:
        value = serials[serial]

        ascent_rate = value['rate'] if value['rate'] > 0.5 else 5  # this shouldn't really be used, but it keeps the API happy
        descent_rate = seaLevelDescentRate(abs(value['rate']), value['alt']) if value['rate'] < 0 else 6
        if descent_rate < 0.5:
            continue

        if value['rate'] < 0:
            burst_altitude = value['alt'] + 0.05
        else:
            burst_altitude = (value['alt'] + 0.05) if value['alt'] > 26000 else 26000

        longitude = float(value['position'][1].strip())
        if longitude < 0:
            longitude += 360  # Tawhiri expects longitudes in the 0..360 range

        url = f"/api/v1/?launch_latitude={value['position'][0].strip()}&launch_longitude={longitude}&launch_datetime={value['time']}&launch_altitude={value['alt']:.2f}&ascent_rate={ascent_rate:.2f}&burst_altitude={burst_altitude:.2f}&descent_rate={descent_rate:.2f}"

        conn.request("GET", url)
        res = conn.getresponse()
        data = res.read()
        if res.status != 200:
            logging.debug(data)
        serial_data[serial] = json.loads(data.decode("utf-8"))
    logging.debug("Stop Predict")
    output = []
    for serial in serial_data:
        value = serial_data[serial]
        data = []
        if 'prediction' in value:
            for stage in value['prediction']:
                if stage['stage'] == 'ascent' and serials[serial]['rate'] < 0:  # ignore the ascent stage if we have already burst
                    continue
                for item in stage['trajectory']:
                    data.append({
                        "time": int(datetime.fromisoformat(item['datetime'].split(".")[0].replace("Z", "")).timestamp()),
                        "lat": item['latitude'],
                        "lon": item['longitude'] - 360 if item['longitude'] > 180 else item['longitude'],
                        "alt": item['altitude'],
                    })
            output.append({
                "vehicle": serial,
                "time": value['request']['launch_datetime'],
                "latitude": value['request']['launch_latitude'],
                "longitude": value['request']['launch_longitude'],
                "altitude": value['request']['launch_altitude'],
                "ascent_rate": value['request']['ascent_rate'],
                "descent_rate": value['request']['descent_rate'],
                "burst_altitude": value['request']['burst_altitude'],
                "descending": 1 if serials[serial]['rate'] < 0 else 0,
                "landed": 0,
                "data": json.dumps(data)
            })
    logging.debug("Finished")
    return json.dumps(output)

def es_request(payload, path, method):
    params = json.dumps(payload)
    headers = {"Host": HOST, "Content-Type": "application/json"}
    # note: the method argument is currently ignored and requests are always sent as POST
    request = AWSRequest(
        method="POST", url=f"https://{HOST}/{path}", data=params, headers=headers
    )
    # sign the request with the Lambda execution role's AWS credentials (SigV4, "es" service)
    SigV4Auth(boto3.Session().get_credentials(), "es", "us-east-1").add_auth(request)
    session = URLLib3Session()
    r = session.send(request.prepare())
    return json.loads(r.text)
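
# Example (sketch): predict() issues the search above roughly as
#   es_request(payload, "telm-*/_search", "GET")
# and gets back the parsed Elasticsearch response as a dict.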

if __name__ == "__main__":
    # print(get_sondes({"queryStringParameters":{"lat":"-28.22717","lon":"153.82996","distance":"50000"}}, {}))
    # mode: 6hours
    # type: positions
    # format: json
    # max_positions: 0
    # position_id: 0
    # vehicles: RS_*;*chase
    print(predict(
        {"queryStringParameters": {
            # "vehicles": "S4610686"
        }}, {}
    ))

# get list of sondes: serial, lat, lon, alt and current rate
# for each one, request http://predict.cusf.co.uk/api/v1/?launch_latitude=-37.8136&launch_longitude=144.9631&launch_datetime=2021-02-22T00:15:18.513413Z&launch_altitude=30000&ascent_rate=5&burst_altitude=30000.1&descent_rate=5
# have to set the burst alt slightly higher than the launch