mirror of
https://github.com/projecthorus/sondehub-infra.git
synced 2025-02-20 17:12:50 +00:00
added legacy endpoints
This commit is contained in:
parent
084cef93b2
commit
1e0cb864bf
@ -5,6 +5,8 @@ from datetime import datetime, timedelta
|
||||
import threading
|
||||
from queue import Queue
|
||||
import queue
|
||||
from botocore import UNSIGNED
|
||||
from botocore.config import Config
|
||||
|
||||
S3_BUCKET = "sondehub-open-data"
|
||||
|
||||
@ -18,7 +20,7 @@ class Downloader(threading.Thread): # Stolen from the SDK, if I wasn't lazy I'd
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def run(self):
|
||||
s3 = boto3.client("s3")
|
||||
s3 = boto3.client("s3", config=Config(signature_version=UNSIGNED))
|
||||
while True:
|
||||
try:
|
||||
task = self.tasks_to_accomplish.get_nowait()
|
||||
@ -35,7 +37,7 @@ class Downloader(threading.Thread): # Stolen from the SDK, if I wasn't lazy I'd
|
||||
def download(serial):
|
||||
prefix_filter = f"serial-hashed/{serial}/"
|
||||
|
||||
s3 = boto3.resource("s3")
|
||||
s3 = boto3.resource("s3", config=Config(signature_version=UNSIGNED))
|
||||
bucket = s3.Bucket(S3_BUCKET)
|
||||
data = []
|
||||
|
||||
|
184
main.tf
184
main.tf
@ -476,6 +476,12 @@ data "archive_file" "sign_socket" {
|
||||
output_path = "${path.module}/build/sign_socket.zip"
|
||||
}
|
||||
|
||||
# Package the predictions lambda source into a zip for deployment.
data "archive_file" "predictions" {
  type        = "zip"
  source_file = "predict/lambda_function.py"
  output_path = "${path.module}/build/predictions.zip"
}
|
||||
|
||||
resource "aws_lambda_function" "LambdaFunction" {
|
||||
function_name = "sonde-api-to-iot-core"
|
||||
handler = "lambda_function.lambda_handler"
|
||||
@ -524,6 +530,78 @@ resource "aws_lambda_function" "get_sondes" {
|
||||
]
|
||||
}
|
||||
|
||||
# Legacy /listeners endpoint — served from the shared query.zip bundle.
resource "aws_lambda_function" "listeners" {
  function_name    = "listeners"
  handler          = "lambda_function.get_listeners"
  filename         = "${path.module}/build/query.zip"
  source_code_hash = data.archive_file.query.output_base64sha256
  publish          = true
  memory_size      = 256
  role             = aws_iam_role.IAMRole5.arn
  runtime          = "python3.7"
  timeout          = 10

  tracing_config {
    mode = "Active"
  }

  environment {
    variables = {
      "ES" = "es.${local.domain_name}"
    }
  }

  layers = [
    "arn:aws:lambda:us-east-1:${data.aws_caller_identity.current.account_id}:layer:xray-python:1",
    "arn:aws:lambda:us-east-1:${data.aws_caller_identity.current.account_id}:layer:iot:3"
  ]
}
|
||||
|
||||
|
||||
# Legacy /datanew endpoint — served from the shared query.zip bundle.
resource "aws_lambda_function" "datanew" {
  function_name    = "datanew"
  handler          = "lambda_function.datanew"
  filename         = "${path.module}/build/query.zip"
  source_code_hash = data.archive_file.query.output_base64sha256
  publish          = true
  memory_size      = 256
  role             = aws_iam_role.IAMRole5.arn
  runtime          = "python3.7"
  timeout          = 10

  tracing_config {
    mode = "Active"
  }

  environment {
    variables = {
      "ES" = "es.${local.domain_name}"
    }
  }

  layers = [
    "arn:aws:lambda:us-east-1:${data.aws_caller_identity.current.account_id}:layer:xray-python:1",
    "arn:aws:lambda:us-east-1:${data.aws_caller_identity.current.account_id}:layer:iot:3"
  ]
}
|
||||
|
||||
# Legacy /predictions endpoint — has its own bundle (predictions.zip).
resource "aws_lambda_function" "predictions" {
  function_name    = "predictions"
  handler          = "lambda_function.predict"
  filename         = "${path.module}/build/predictions.zip"
  source_code_hash = data.archive_file.predictions.output_base64sha256
  publish          = true
  memory_size      = 256
  role             = aws_iam_role.IAMRole5.arn
  runtime          = "python3.7"
  timeout          = 10

  tracing_config {
    mode = "Active"
  }

  environment {
    variables = {
      "ES" = "es.${local.domain_name}"
    }
  }

  layers = [
    "arn:aws:lambda:us-east-1:${data.aws_caller_identity.current.account_id}:layer:xray-python:1",
    "arn:aws:lambda:us-east-1:${data.aws_caller_identity.current.account_id}:layer:iot:3"
  ]
}
|
||||
|
||||
resource "aws_lambda_function" "get_telem" {
|
||||
function_name = "get_telem"
|
||||
@ -596,35 +674,57 @@ resource "aws_lambda_permission" "sign_socket" {
|
||||
action = "lambda:InvokeFunction"
|
||||
function_name = aws_lambda_function.sign_socket.arn
|
||||
principal = "apigateway.amazonaws.com"
|
||||
source_arn = "arn:aws:execute-api:us-east-1:${data.aws_caller_identity.current.account_id}:r03szwwq41/*/*/sondes/websocket"
|
||||
source_arn = "arn:aws:execute-api:us-east-1:${data.aws_caller_identity.current.account_id}:${aws_apigatewayv2_api.ApiGatewayV2Api.id}/*/*/sondes/websocket"
|
||||
}
|
||||
|
||||
# Allow the HTTP API to invoke the history lambda.
# Uses the ApiGatewayV2Api resource id instead of the old hardcoded id.
resource "aws_lambda_permission" "history" {
  action        = "lambda:InvokeFunction"
  function_name = aws_lambda_function.history.arn
  principal     = "apigateway.amazonaws.com"
  source_arn    = "arn:aws:execute-api:us-east-1:${data.aws_caller_identity.current.account_id}:${aws_apigatewayv2_api.ApiGatewayV2Api.id}/*/*/sonde/{serial}"
}
|
||||
|
||||
# Allow the HTTP API to invoke the get_sondes lambda.
# Uses the ApiGatewayV2Api resource id instead of the old hardcoded id.
resource "aws_lambda_permission" "get_sondes" {
  action        = "lambda:InvokeFunction"
  function_name = aws_lambda_function.get_sondes.arn
  principal     = "apigateway.amazonaws.com"
  source_arn    = "arn:aws:execute-api:us-east-1:${data.aws_caller_identity.current.account_id}:${aws_apigatewayv2_api.ApiGatewayV2Api.id}/*/*/sondes"
}
|
||||
|
||||
# Allow the HTTP API to invoke the listeners lambda on GET /listeners.
resource "aws_lambda_permission" "listeners" {
  action        = "lambda:InvokeFunction"
  function_name = aws_lambda_function.listeners.arn
  principal     = "apigateway.amazonaws.com"
  source_arn    = "arn:aws:execute-api:us-east-1:${data.aws_caller_identity.current.account_id}:${aws_apigatewayv2_api.ApiGatewayV2Api.id}/*/*/listeners"
}
|
||||
|
||||
# Allow the HTTP API to invoke the datanew lambda on GET /datanew.
resource "aws_lambda_permission" "datanew" {
  action        = "lambda:InvokeFunction"
  function_name = aws_lambda_function.datanew.arn
  principal     = "apigateway.amazonaws.com"
  source_arn    = "arn:aws:execute-api:us-east-1:${data.aws_caller_identity.current.account_id}:${aws_apigatewayv2_api.ApiGatewayV2Api.id}/*/*/datanew"
}
|
||||
|
||||
# Allow the HTTP API to invoke the predictions lambda on GET /predictions.
resource "aws_lambda_permission" "predictions" {
  action        = "lambda:InvokeFunction"
  function_name = aws_lambda_function.predictions.arn
  principal     = "apigateway.amazonaws.com"
  source_arn    = "arn:aws:execute-api:us-east-1:${data.aws_caller_identity.current.account_id}:${aws_apigatewayv2_api.ApiGatewayV2Api.id}/*/*/predictions"
}
|
||||
|
||||
|
||||
# Allow the HTTP API to invoke the get_telem lambda.
# Uses the ApiGatewayV2Api resource id instead of the old hardcoded id.
resource "aws_lambda_permission" "get_telem" {
  action        = "lambda:InvokeFunction"
  function_name = aws_lambda_function.get_telem.arn
  principal     = "apigateway.amazonaws.com"
  source_arn    = "arn:aws:execute-api:us-east-1:${data.aws_caller_identity.current.account_id}:${aws_apigatewayv2_api.ApiGatewayV2Api.id}/*/*/sondes/telemetry"
}
|
||||
|
||||
# Allow the HTTP API to invoke the sonde-api-to-iot-core lambda.
# Uses the ApiGatewayV2Api resource id instead of the old hardcoded id.
# NOTE(review): this source_arn is identical to get_telem's
# (/*/*/sondes/telemetry) — confirm both permissions are intentional.
resource "aws_lambda_permission" "LambdaPermission2" {
  action        = "lambda:InvokeFunction"
  function_name = aws_lambda_function.LambdaFunction.arn
  principal     = "apigateway.amazonaws.com"
  source_arn    = "arn:aws:execute-api:us-east-1:${data.aws_caller_identity.current.account_id}:${aws_apigatewayv2_api.ApiGatewayV2Api.id}/*/*/sondes/telemetry"
}
|
||||
|
||||
resource "aws_lambda_layer_version" "LambdaLayerVersion2" {
|
||||
@ -662,11 +762,27 @@ resource "aws_apigatewayv2_api" "ApiGatewayV2Api" {
|
||||
api_key_selection_expression = "$request.header.x-api-key"
|
||||
protocol_type = "HTTP"
|
||||
route_selection_expression = "$request.method $request.path"
|
||||
|
||||
cors_configuration {
|
||||
allow_credentials = false
|
||||
allow_headers = [
|
||||
"*",
|
||||
]
|
||||
allow_methods = [
|
||||
"*",
|
||||
]
|
||||
allow_origins = [
|
||||
"*",
|
||||
]
|
||||
expose_headers = []
|
||||
max_age = 0
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
resource "aws_apigatewayv2_stage" "ApiGatewayV2Stage" {
|
||||
name = "$default"
|
||||
api_id = aws_apigatewayv2_api.ApiGatewayV2Api.id
|
||||
name = "$default"
|
||||
api_id = aws_apigatewayv2_api.ApiGatewayV2Api.id
|
||||
default_route_settings {
|
||||
detailed_metrics_enabled = false
|
||||
}
|
||||
@ -724,6 +840,30 @@ resource "aws_apigatewayv2_route" "get_sondes" {
|
||||
target = "integrations/${aws_apigatewayv2_integration.get_sondes.id}"
|
||||
}
|
||||
|
||||
# Route GET /listeners to the listeners lambda integration.
resource "aws_apigatewayv2_route" "listeners" {
  api_id             = aws_apigatewayv2_api.ApiGatewayV2Api.id
  api_key_required   = false
  authorization_type = "NONE"
  route_key          = "GET /listeners"
  target             = "integrations/${aws_apigatewayv2_integration.listeners.id}"
}
|
||||
|
||||
# Route GET /datanew to the datanew lambda integration.
resource "aws_apigatewayv2_route" "datanew" {
  api_id             = aws_apigatewayv2_api.ApiGatewayV2Api.id
  api_key_required   = false
  authorization_type = "NONE"
  route_key          = "GET /datanew"
  target             = "integrations/${aws_apigatewayv2_integration.datanew.id}"
}
|
||||
|
||||
# Route GET /predictions to the predictions lambda integration.
resource "aws_apigatewayv2_route" "predictions" {
  api_id             = aws_apigatewayv2_api.ApiGatewayV2Api.id
  api_key_required   = false
  authorization_type = "NONE"
  route_key          = "GET /predictions"
  target             = "integrations/${aws_apigatewayv2_integration.predictions.id}"
}
|
||||
|
||||
resource "aws_apigatewayv2_route" "get_telem" {
|
||||
api_id = aws_apigatewayv2_api.ApiGatewayV2Api.id
|
||||
api_key_required = false
|
||||
@ -762,6 +902,36 @@ resource "aws_apigatewayv2_integration" "get_sondes" {
|
||||
payload_format_version = "2.0"
|
||||
}
|
||||
|
||||
# Lambda-proxy integration backing the /listeners route.
resource "aws_apigatewayv2_integration" "listeners" {
  api_id                 = aws_apigatewayv2_api.ApiGatewayV2Api.id
  connection_type        = "INTERNET"
  integration_method     = "POST"
  integration_type       = "AWS_PROXY"
  integration_uri        = aws_lambda_function.listeners.arn
  timeout_milliseconds   = 30000
  payload_format_version = "2.0"
}
|
||||
|
||||
# Lambda-proxy integration backing the /datanew route.
resource "aws_apigatewayv2_integration" "datanew" {
  api_id                 = aws_apigatewayv2_api.ApiGatewayV2Api.id
  connection_type        = "INTERNET"
  integration_method     = "POST"
  integration_type       = "AWS_PROXY"
  integration_uri        = aws_lambda_function.datanew.arn
  timeout_milliseconds   = 30000
  payload_format_version = "2.0"
}
|
||||
|
||||
# Lambda-proxy integration backing the /predictions route.
resource "aws_apigatewayv2_integration" "predictions" {
  api_id                 = aws_apigatewayv2_api.ApiGatewayV2Api.id
  connection_type        = "INTERNET"
  integration_method     = "POST"
  integration_type       = "AWS_PROXY"
  integration_uri        = aws_lambda_function.predictions.arn
  timeout_milliseconds   = 30000
  payload_format_version = "2.0"
}
|
||||
|
||||
resource "aws_apigatewayv2_integration" "get_telem" {
|
||||
api_id = aws_apigatewayv2_api.ApiGatewayV2Api.id
|
||||
connection_type = "INTERNET"
|
||||
|
250
predict/lambda_function.py
Normal file
250
predict/lambda_function.py
Normal file
@ -0,0 +1,250 @@
|
||||
import boto3
|
||||
import botocore.credentials
|
||||
from botocore.awsrequest import AWSRequest
|
||||
from botocore.endpoint import URLLib3Session
|
||||
from botocore.auth import SigV4Auth
|
||||
import json
|
||||
import os
|
||||
from datetime import datetime, timedelta, timezone
|
||||
import sys, traceback
|
||||
import http.client
|
||||
|
||||
|
||||
HOST = os.getenv("ES")
|
||||
|
||||
def predict(event, context):
    """Legacy /predictions endpoint.

    Pulls the last hour of telemetry per serial from Elasticsearch,
    derives each sonde's latest position and vertical rate, asks the
    CUSF predictor (predict.cusf.co.uk) for a flight path, and returns
    a habhub-style JSON string of predictions.

    Fixes over the original:
    - the per-serial bucket list was re-sorted four times; now sorted once
    - the HTTPS connection to the predictor is closed on all paths
    - the bare ``except: pass`` is narrowed to the lookup errors that
      occur when a serial has incomplete aggregation data
    """
    path = "telm-*/_search"
    # Aggregate per serial ("2"), in 30 s time buckets ("3"); within each
    # bucket: latest alt ("1"), latest position ("5"), and the bucket-to-
    # bucket altitude delta ("4" via the "4-metric" average).
    payload = {
        "aggs": {
            "2": {
                "terms": {
                    "field": "serial.keyword",
                    "order": {"_key": "desc"},
                    "size": 1000
                },
                "aggs": {
                    "3": {
                        "date_histogram": {
                            "field": "datetime",
                            "fixed_interval": "30s"
                        },
                        "aggs": {
                            "1": {
                                "top_hits": {
                                    "docvalue_fields": [{"field": "alt"}],
                                    "_source": "alt",
                                    "size": 1,
                                    "sort": [{"datetime": {"order": "desc"}}]
                                }
                            },
                            "4": {
                                "serial_diff": {"buckets_path": "4-metric"}
                            },
                            "5": {
                                "top_hits": {
                                    "docvalue_fields": [{"field": "position"}],
                                    "_source": "position",
                                    "size": 1,
                                    "sort": [{"datetime": {"order": "desc"}}]
                                }
                            },
                            "4-metric": {"avg": {"field": "alt"}}
                        }
                    }
                }
            }
        },
        "size": 0,
        "stored_fields": ["*"],
        "script_fields": {},
        "docvalue_fields": [
            {"field": "@timestamp", "format": "date_time"},
            {"field": "datetime", "format": "date_time"},
            {"field": "log_date", "format": "date_time"},
            {"field": "time_received", "format": "date_time"},
            {"field": "time_server", "format": "date_time"},
            {"field": "time_uploaded", "format": "date_time"}
        ],
        "_source": {"excludes": []},
        "query": {
            "bool": {
                "must": [],
                "filter": [
                    {"match_all": {}},
                    {
                        "range": {
                            "datetime": {
                                "gte": "now-1h",
                                "lte": "now",
                                "format": "strict_date_optional_time"
                            }
                        }
                    }
                ],
                "should": [],
                "must_not": []
            }
        }
    }
    # "RS_*;*chase" is the legacy client's "all vehicles" wildcard.
    if "queryStringParameters" in event:
        if "vehicles" in event["queryStringParameters"] and event["queryStringParameters"]["vehicles"] != "RS_*;*chase":
            payload["query"]["bool"]["filter"].append(
                {
                    "match_phrase": {
                        "serial": str(event["queryStringParameters"]["vehicles"])
                    }
                }
            )
    results = es_request(payload, path, "GET")

    # Latest known state per serial: altitude, position, vertical rate, time.
    serials = {}
    for x in results['aggregations']['2']['buckets']:
        try:
            # Newest 30 s bucket for this serial (sorted once, not per field).
            latest = sorted(x['3']['buckets'], key=lambda k: k['key_as_string'])[-1]
            serials[x['key']] = {
                "alt": latest['1']['hits']['hits'][0]['fields']['alt'][0],
                "position": latest['5']['hits']['hits'][0]['fields']['position'][0].split(","),
                "rate": latest['4']['value'] / 30,  # buckets are 30 s wide
                "time": latest['key_as_string']
            }
        except (KeyError, IndexError, TypeError):
            # Serial lacks a complete latest bucket (no hits / no diff yet).
            pass

    conn = http.client.HTTPSConnection("predict.cusf.co.uk")
    serial_data = {}
    try:
        for serial, value in serials.items():
            # Rates below are fallbacks to keep the predictor API happy when
            # we only have one bucket (rate == 0) or the wrong sign.
            ascent_rate = value['rate'] if value['rate'] > 0 else 5
            descent_rate = abs(value['rate'] if value['rate'] < 0 else 6)
            # Burst altitude must exceed launch altitude for the API.
            burst_altitude = (value['alt'] + 0.05) if value['alt'] > 26000 else 26000

            # NOTE(review): longitude is shifted by +180 here and -180 on the
            # way out — presumably a dateline workaround; confirm.
            conn.request(
                "GET",
                f"/api/v1/?launch_latitude={value['position'][0].strip()}&launch_longitude={float(value['position'][1].strip())+180}&launch_datetime={value['time']}&launch_altitude={value['alt']}&ascent_rate={ascent_rate}&burst_altitude={burst_altitude}&descent_rate={descent_rate}"
            )
            res = conn.getresponse()
            serial_data[serial] = json.loads(res.read().decode("utf-8"))
    finally:
        conn.close()

    output = []
    for serial, value in serial_data.items():
        data = []
        for stage in value['prediction']:
            # Ignore the ascent stage if the sonde has already burst.
            if stage['stage'] == 'ascent' and serials[serial]['rate'] < 0:
                continue
            for item in stage['trajectory']:
                data.append({
                    "time": item['datetime'],
                    "lat": item['latitude'],
                    "lon": item['longitude'] - 180,
                    "alt": item['altitude'],
                })

        output.append({
            "vehicle": serial,
            "time": value['request']['launch_datetime'],
            "latitude": value['request']['launch_latitude'],
            "longitude": value['request']['launch_longitude'] - 180,
            "altitude": value['request']['launch_altitude'],
            "ascent_rate": value['request']['ascent_rate'],
            "descent_rate": value['request']['descent_rate'],
            "burst_altitude": value['request']['burst_altitude'],
            "landed": 0,
            "data": json.dumps(data)
        })

    return json.dumps(output)
|
||||
|
||||
def es_request(payload, path, method):
    """Sign a request with the lambda's AWS credentials and send it to ES.

    payload: JSON-serialisable request body.
    path: ES path relative to the host, e.g. "telm-*/_search".
    method: accepted for API symmetry, but NOTE(review): the request is
        always sent as POST (ES _search accepts a body either way) —
        confirm before relying on other verbs.
    Returns the decoded JSON response.
    """
    params = json.dumps(payload)
    headers = {"Host": HOST, "Content-Type": "application/json"}
    request = AWSRequest(
        method="POST", url=f"https://{HOST}/{path}", data=params, headers=headers
    )
    # SigV4-sign with the execution role's credentials for the ES domain.
    SigV4Auth(boto3.Session().get_credentials(), "es", "us-east-1").add_auth(request)

    session = URLLib3Session()
    r = session.send(request.prepare())
    return json.loads(r.text)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Example legacy query parameters:
    #   mode: 6hours, type: positions, format: json,
    #   max_positions: 0, position_id: 0, vehicles: RS_*;*chase
    # Bug fix: the original called get_sondes_in_air_rates(), which is not
    # defined in this module (NameError at runtime); predict() is the entry
    # point this file provides.
    print(predict({}, {}))


# get list of sondes, serial, lat,lon, alt
# and current rate
# for each one, request http://predict.cusf.co.uk/api/v1/?launch_latitude=-37.8136&launch_longitude=144.9631&launch_datetime=2021-02-22T00:15:18.513413Z&launch_altitude=30000&ascent_rate=5&burst_altitude=30000.1&descent_rate=5
# have to set the burst alt slightly higher than the launch
@ -5,7 +5,8 @@ from botocore.endpoint import URLLib3Session
|
||||
from botocore.auth import SigV4Auth
|
||||
import json
|
||||
import os
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import datetime, timedelta, timezone
|
||||
import sys, traceback
|
||||
|
||||
HOST = os.getenv("ES")
|
||||
# get current sondes, filter by date, location
|
||||
@ -184,6 +185,254 @@ def get_telem(event, context):
|
||||
for sonde in results["aggregations"]["2"]["buckets"]
|
||||
}
|
||||
return json.dumps(output)
|
||||
def datanew(event, context):
    """Legacy habhub-style /datanew endpoint.

    Returns recent positions bucketed per serial as a JSON string of
    {"positions": {"position": [...]}} sorted by position_id.

    Bug fix: the interval-override check used ``or`` between the two
    vehicle comparisons, which is true for ANY value of "vehicles", so
    every request carrying the key got the 5 s interval. ``and`` matches
    the serial-filter condition applied further down.
    """
    # Predefined (duration seconds, bucket interval seconds) pairs —
    # ideally we shouldn't need these, but it's a lot of data and we
    # don't want to overload ES.
    durations = {
        "3days": (259200, 1200),  # 3d, 20m
        "1day": (86400, 600),     # 1d, 10m
        "12hours": (43200, 120),  # 12h, 2m
        "6hours": (21600, 60),    # 6h, 1m
        "3hours": (10800, 10),    # 3h, 10s
        "1hour": (3600, 1),       # 1h, 1s
    }
    duration_query = "1hour"
    requested_time = datetime.now()

    # Only the "positions" type of the legacy API is supported.
    if event["queryStringParameters"]['type'] != 'positions':
        raise ValueError

    # NOTE(review): parsed but not applied to the result set yet.
    max_positions = int(event["queryStringParameters"]["max_positions"]) if "max_positions" in event["queryStringParameters"] else 10000

    if event["queryStringParameters"]["mode"] in durations:
        duration_query = event["queryStringParameters"]["mode"]
    else:
        return f"Duration must be either {', '.join(durations.keys())}"

    (duration, interval) = durations[duration_query]
    # A single-serial query is cheap, so use a finer 5 s interval.
    # ("RS_*;*chase" is the legacy client's "all vehicles" wildcard.)
    if "vehicles" in event["queryStringParameters"] and event["queryStringParameters"]['vehicles'] != "RS_*;*chase" and event["queryStringParameters"]['vehicles'] != "":
        interval = 5

    if event["queryStringParameters"]["position_id"] != "0":
        # Resume from the client-supplied last-seen position timestamp.
        requested_time = datetime.fromisoformat(event["queryStringParameters"]["position_id"].replace("Z", "+00:00"))
        lt = datetime.now()
        gte = requested_time
    else:
        lt = datetime.now()
        gte = datetime.now() - timedelta(0, duration)

    path = "telm-*/_search"
    # Per serial ("2"), histogram buckets of `interval` seconds ("3"),
    # keeping the newest document per bucket ("1").
    payload = {
        "aggs": {
            "2": {
                "terms": {
                    "field": "serial.keyword",
                    "order": {"_key": "desc"},
                    "size": 10000,
                },
                "aggs": {
                    "3": {
                        "date_histogram": {
                            "field": "datetime",
                            "fixed_interval": f"{str(interval)}s",
                            "time_zone": "Australia/Brisbane",
                            "min_doc_count": 1,
                        },
                        "aggs": {
                            "1": {
                                "top_hits": {
                                    "size": 1,
                                    "sort": [{"datetime": {"order": "desc"}}],
                                }
                            }
                        },
                    }
                },
            }
        },
        "query": {
            "bool": {
                "filter": [
                    {"match_all": {}},
                    {
                        "range": {
                            "datetime": {
                                "gte": gte.isoformat(),
                                "lt": lt.isoformat()
                            }
                        }
                    },
                ]
            }
        },
    }
    if "vehicles" in event["queryStringParameters"] and event["queryStringParameters"]['vehicles'] != "RS_*;*chase" and event["queryStringParameters"]['vehicles'] != "":
        payload["query"]["bool"]["filter"].append(
            {
                "match_phrase": {
                    "serial": str(event["queryStringParameters"]["vehicles"])
                }
            }
        )
    results = es_request(payload, path, "POST")

    output = {
        "positions": {
            "position": []
        }
    }

    for sonde in results["aggregations"]["2"]["buckets"]:
        for frame in sonde["3"]["buckets"]:
            try:
                frame_data = frame['1']['hits']['hits'][0]['_source']
                frequency = f'{frame_data["frequency"]} MHz' if "frequency" in frame_data else ""
                pressure = f'{frame_data["pressure"]}hPa' if "pressure" in frame_data else ""
                bt = f'BT {frame_data["burst_timer"]}' if "burst_timer" in frame_data else ""
                batt = f'{frame_data["batt"]}V' if "batt" in frame_data else ""
                subtype = frame_data["subtype"] if "subtype" in frame_data else ""
                output["positions"]["position"].append({
                    "position_id": f'{frame_data["serial"]}-{frame_data["datetime"]}',
                    "mission_id": "0",
                    "vehicle": frame_data["serial"],
                    "server_time": frame_data["datetime"],
                    "gps_time": frame_data["datetime"],
                    "gps_lat": frame_data["lat"],
                    "gps_lon": frame_data["lon"],
                    "gps_alt": frame_data["alt"],
                    "gps_heading": "",
                    "gps_speed": frame_data["vel_h"],
                    "picture": "",
                    "temp_inside": "",
                    "data": {
                        "comment": f"{subtype} {frame_data['serial']} {frequency} {pressure} {bt} {batt}",
                        "temperature_external": "-34.2",
                        "humidity": "17.9"
                    },
                    "callsign": frame_data["uploader_callsign"],
                    "sequence": "0"
                })
            except (KeyError, IndexError, TypeError):
                # Frames missing required telemetry fields are skipped.
                pass
    output["positions"]["position"] = sorted(output["positions"]["position"], key=lambda k: k['position_id'])
    return json.dumps(output)
|
||||
|
||||
|
||||
def get_listeners(event, context):
    """Legacy /listeners endpoint.

    Returns, as a JSON string, the most recent station record per
    uploader callsign seen in the last 7 days (SondehubV1 uploads
    excluded), including position, altitude and a habhub-style HTML
    description.

    Bug fix: tdiff_hours used ``timedelta.seconds``, which drops whole
    days — with a 7-day window the age wrapped every 24 h. Replaced
    with ``total_seconds()``.
    """
    path = "telm-*/_search"
    # Latest document per uploader_callsign ("2" -> top hit "1"); only
    # stations with complete position/software metadata qualify.
    payload = {
        "aggs": {
            "2": {
                "terms": {
                    "field": "uploader_callsign.keyword",
                    "order": {"_key": "desc"},
                    "size": 500
                },
                "aggs": {
                    "1": {
                        "top_hits": {
                            "_source": False,
                            "size": 1,
                            "docvalue_fields": ["uploader_position", "uploader_alt", "uploader_antenna.keyword", "software_name.keyword", "software_version.keyword", "datetime"],
                            "sort": [{"datetime": {"order": "desc"}}]
                        }
                    }
                }
            }
        },
        "size": 0,
        "query": {
            "bool": {
                "must": [],
                "filter": [
                    {"match_all": {}},
                    {"exists": {"field": "uploader_position"}},
                    {"exists": {"field": "uploader_alt"}},
                    {"exists": {"field": "uploader_antenna.keyword"}},
                    {"exists": {"field": "software_name.keyword"}},
                    {"exists": {"field": "software_version.keyword"}},
                    {"exists": {"field": "datetime"}},
                    {
                        "range": {
                            "datetime": {
                                "gte": "now-7d",
                                "lte": "now",
                                "format": "strict_date_optional_time"
                            }
                        }
                    }
                ],
                "should": [],
                "must_not": [
                    {"match_phrase": {"type": "SondehubV1"}}
                ]
            }
        }
    }

    results = es_request(payload, path, "GET")

    output = [
        {
            "name": listener['key'],
            # Age of the last upload in hours (total_seconds keeps days).
            "tdiff_hours": (datetime.now(timezone.utc) - datetime.fromisoformat(listener["1"]["hits"]["hits"][0]["fields"]['datetime'][0].replace("Z", "+00:00"))).total_seconds() / 60 / 60,
            # uploader_position docvalue is "lat, lon".
            "lon": float(listener["1"]["hits"]["hits"][0]["fields"]['uploader_position'][0].replace(" ", "").split(",")[1]),
            "lat": float(listener["1"]["hits"]["hits"][0]["fields"]['uploader_position'][0].replace(" ", "").split(",")[0]),
            "alt": float(listener["1"]["hits"]["hits"][0]["fields"]['uploader_alt'][0]),
            "description": f"""\n
<font size=\"-2\"><BR>\n
<B>Radio: {listener["1"]["hits"]["hits"][0]["fields"]["software_name.keyword"][0]}-{listener["1"]["hits"]["hits"][0]["fields"]["software_version.keyword"][0]}</B><BR>\n
<B>Antenna: </B>{listener["1"]["hits"]["hits"][0]["fields"]["uploader_antenna.keyword"][0]}<BR>\n
<B>Last Contact: </B>{listener["1"]["hits"]["hits"][0]["fields"]["datetime"][0]} <BR>\n
</font>\n
"""
        }
        for listener in results["aggregations"]["2"]["buckets"]
    ]
    return json.dumps(output)
|
||||
|
||||
|
||||
def es_request(payload, path, method):
|
||||
@ -204,8 +453,21 @@ def es_request(payload, path, method):
|
||||
|
||||
if __name__ == "__main__":
    # Example legacy query parameters:
    #   mode: 6hours, type: positions, format: json,
    #   max_positions: 0, position_id: 0, vehicles: RS_*;*chase
    # Reconciled from interleaved diff lines: the old get_telem() smoke
    # call was replaced by the new datanew() call below.
    print(
        datanew(
            {"queryStringParameters": {
                "mode": "1day",
                "type": "positions",
                "format": "json",
                "max_positions": "0",
                "position_id": "0",
                "vehicles": "S2720104"
            }}, {}
        )
    )
|
Loading…
x
Reference in New Issue
Block a user