Add reordering of CSV field names (#114)

* Add reordering of CSV field names

* Terraform fmt [skip ci]

* pin terraform version data

---------

Co-authored-by: Mark Jessop <darkside@Marks-MacBook-Pro.local>
Co-authored-by: darksidelemm <darksidelemm@users.noreply.github.com>
Co-authored-by: xss <michaela@michaela.lgbt>
8 changed files with 131 additions and 69 deletions


@@ -24,6 +24,8 @@ jobs:
           role-to-assume: arn:aws:iam::143841941773:role/github
           role-session-name: Terraform
       - uses: hashicorp/setup-terraform@v1
+        with:
+          terraform_version: 1.1.5
       - name: 'Checkout'
         uses: actions/checkout@master
       - name: Terraform fmt


@@ -23,6 +23,8 @@ jobs:
           role-to-assume: arn:aws:iam::143841941773:role/github
           role-session-name: Terraform
       - uses: hashicorp/setup-terraform@v1
+        with:
+          terraform_version: 1.1.5
       - name: 'Checkout'
         uses: actions/checkout@master
       - name: Terraform fmt

.terraform.lock.hcl (generated, new file, 43 additions)

@@ -0,0 +1,43 @@
+# This file is maintained automatically by "terraform init".
+# Manual edits may be lost in future updates.
+
+provider "registry.terraform.io/hashicorp/archive" {
+  version = "2.3.0"
+  hashes = [
+    "h1:NaDbOqAcA9d8DiAS5/6+5smXwN3/+twJGb3QRiz6pNw=",
+    "zh:0869128d13abe12b297b0cd13b8767f10d6bf047f5afc4215615aabc39c2eb4f",
+    "zh:481ed837d63ba3aa45dd8736da83e911e3509dee0e7961bf5c00ed2644f807b3",
+    "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
+    "zh:9f08fe2977e2166849be24fb9f394e4d2697414d463f7996fd0d7beb4e19a29c",
+    "zh:9fe566deeafd460d27999ca0bbfd85426a5fcfcb40007b23884deb76da127b6f",
+    "zh:a1bd9a60925d9769e0da322e4523330ee86af9dc2e770cba1d0247a999ef29cb",
+    "zh:bb4094c8149f74308b22a87e1ac19bcccca76e8ef021b571074d9bccf1c0c6f0",
+    "zh:c8984c9def239041ce41ec8e19bbd76a49e74ed2024ff736dad60429dee89bcc",
+    "zh:ea4bb5ae73db1de3a586e62f39106f5e56770804a55aa5e6b4f642df973e0e75",
+    "zh:f44a9d596ecc3a8c5653f56ba0cd202ad93b49f76767f4608daf7260b813289e",
+    "zh:f5c5e6cc9f7f070020ab7d95fcc9ed8e20d5cf219978295a71236e22cbb6d508",
+    "zh:fd2273f51dcc8f43403bf1e425ba9db08a57c3ddcba5ad7a51742ccde21ca611",
+  ]
+}
+
+provider "registry.terraform.io/hashicorp/aws" {
+  version = "4.60.0"
+  hashes = [
+    "h1:XxVhnhtrRW3YueabP668hVZ3qL4th7pcWbx+ot/l864=",
+    "zh:1853d6bc89e289ac36c13485e8ff877c1be8485e22f545bb32c7a30f1d1856e8",
+    "zh:4321d145969e3b7ede62fe51bee248a15fe398643f21df9541eef85526bf3641",
+    "zh:4c01189cc6963abfe724e6b289a7c06d2de9c395011d8d54efa8fe1aac444e2e",
+    "zh:5934db7baa2eec0f9acb9c7f1c3dd3b3fe1e67e23dd4a49e9fe327832967b32b",
+    "zh:5fbedf5d55c6e04e34c32b744151e514a80308e7dec633a56b852829b41e4b5a",
+    "zh:651558e1446cc05061b75e6f5cc6e2959feb17615cd0ace6ec7a2bcc846321c0",
+    "zh:76875eb697916475e554af080f9d4d3cd1f7d5d58ecdd3317a844a30980f4eec",
+    "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
+    "zh:a52528e6d6c945a6ac45b89e9a70a5435148e4c151241e04c231dd2acc4a8c80",
+    "zh:af5f94c69025f1c2466a3cf970d1e9bed72938ec33b976c8c067468b6707bb57",
+    "zh:b6692fad956c9d4ef4266519d9ac2ee9f699f8f2c21627625c9ed63814d41590",
+    "zh:b74311af5fa5ac6e4eb159c12cfb380dfe2f5cd8685da2eac8073475f398ae60",
+    "zh:cc5aa6f738baa42edacba5ef1ca0969e5a959422e4491607255f3f6142ba90ed",
+    "zh:dd1a7ff1b22f0036a76bc905a8229ce7ed0a7eb5a783d3a2586fb1bd920515c3",
+    "zh:e5ab40c4ad0f1c7bd4d5d834d1aa144e690d1a93329d73b3d37512715a638de9",
+  ]
+}


@@ -126,18 +126,18 @@ resource "aws_apigatewayv2_integration" "ham_predictions" {

 resource "aws_lambda_function" "ham_predictions" {
-  function_name    = "ham_predictions"
-  handler          = "ham_predict.predict"
-  s3_bucket        = aws_s3_bucket_object.lambda.bucket
-  s3_key           = aws_s3_bucket_object.lambda.key
-  source_code_hash = data.archive_file.lambda.output_base64sha256
+  function_name                  = "ham_predictions"
+  handler                        = "ham_predict.predict"
+  s3_bucket                      = aws_s3_bucket_object.lambda.bucket
+  s3_key                         = aws_s3_bucket_object.lambda.key
+  source_code_hash               = data.archive_file.lambda.output_base64sha256
   reserved_concurrent_executions = 10
-  publish       = true
-  memory_size   = 128
-  role          = aws_iam_role.basic_lambda_role.arn
-  runtime       = "python3.9"
-  timeout       = 30
-  architectures = ["arm64"]
+  publish                        = true
+  memory_size                    = 128
+  role                           = aws_iam_role.basic_lambda_role.arn
+  runtime                        = "python3.9"
+  timeout                        = 30
+  architectures                  = ["arm64"]
   environment {
     variables = {
       "ES" = "es.${local.domain_name}"


@@ -27,18 +27,18 @@ resource "aws_lambda_function" "ham_get" {

 resource "aws_lambda_function" "ham_telem" {
-  function_name    = "ham_get_telem"
-  handler          = "query_ham.get_telem"
-  s3_bucket        = aws_s3_bucket_object.lambda.bucket
-  s3_key           = aws_s3_bucket_object.lambda.key
-  reserved_concurrent_executions = 10
-  source_code_hash = data.archive_file.lambda.output_base64sha256
-  publish       = true
-  memory_size   = 1024
-  role          = aws_iam_role.basic_lambda_role.arn
-  runtime       = "python3.9"
-  timeout       = 30
-  architectures = ["arm64"]
+  function_name                  = "ham_get_telem"
+  handler                        = "query_ham.get_telem"
+  s3_bucket                      = aws_s3_bucket_object.lambda.bucket
+  s3_key                         = aws_s3_bucket_object.lambda.key
+  reserved_concurrent_executions = 10
+  source_code_hash               = data.archive_file.lambda.output_base64sha256
+  publish                        = true
+  memory_size                    = 1024
+  role                           = aws_iam_role.basic_lambda_role.arn
+  runtime                        = "python3.9"
+  timeout                        = 30
+  architectures                  = ["arm64"]
   environment {
     variables = {
       "ES" = "es.${local.domain_name}"


@@ -370,11 +370,26 @@ def get_telem_full(event, context):
         import csv
         content_type = "text/csv"
         filename = f'{event["pathParameters"]["payload_callsign"]}.csv'
         # Get the set of all keys in all of the data packets.
         csv_keys = list(set().union(*(d.keys() for d in data)))
-        csv_keys.remove("datetime")
-        csv_keys.insert(0, "datetime")  # datetime should be at the front of the CSV
+
+        # Fields that we don't include in the CSV output (either not useful, or duplicates).
+        remove_keys = ["user-agent", "position"]
+        # Mandatory fields that we put up front in the data.
+        mandatory_keys = ["datetime", "payload_callsign", "lat", "lon", "alt"]
+        # Metadata fields that we move to the end.
+        metadata_keys = ["uploader_callsign", "software_name", "software_version", "frequency", "modulation", "baud_rate", "snr", "rssi", "uploader_position", "uploader_antenna", "uploader_radio", "time_received", "raw"]
+
+        csv_keys = [x for x in csv_keys if x not in remove_keys]
+        csv_keys = [x for x in csv_keys if x not in mandatory_keys]
+        csv_keys = [x for x in csv_keys if x not in metadata_keys]
+        # Sort the remaining keys alphanumerically.
+        csv_keys.sort()
+        # Construct our output key ordering.
+        csv_keys = mandatory_keys + csv_keys + metadata_keys
+
         csv_output = StringIO(newline='')
-        fc = csv.DictWriter(csv_output, fieldnames=csv_keys)
+        fc = csv.DictWriter(csv_output, fieldnames=csv_keys, extrasaction='ignore')
         fc.writeheader()
         fc.writerows(data)
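
For illustration, here is a minimal standalone sketch of the ordering logic introduced in this hunk. The sample packets, the callsign, the batt/temp fields, and the abridged metadata_keys list are hypothetical (not real SondeHub data); only the remove/mandatory key lists match the handler above.

import csv
from io import StringIO

# Hypothetical telemetry packets; fields vary from packet to packet.
data = [
    {"datetime": "2023-07-07T00:00:00Z", "payload_callsign": "EXAMPLE-1",
     "lat": -34.9, "lon": 138.6, "alt": 1000.0, "batt": 3.1,
     "snr": 12.0, "user-agent": "curl/8.0"},
    {"datetime": "2023-07-07T00:01:00Z", "payload_callsign": "EXAMPLE-1",
     "lat": -34.8, "lon": 138.7, "alt": 1250.0, "temp": -5.0,
     "time_received": "2023-07-07T00:01:02Z"},
]

remove_keys = ["user-agent", "position"]
mandatory_keys = ["datetime", "payload_callsign", "lat", "lon", "alt"]
metadata_keys = ["snr", "time_received", "raw"]  # abridged for the example

# Union of keys across all packets, minus the removed/pinned groups.
csv_keys = list(set().union(*(d.keys() for d in data)))
csv_keys = [x for x in csv_keys if x not in remove_keys + mandatory_keys + metadata_keys]
csv_keys.sort()  # leftover fields (batt, temp) end up in alphanumeric order
csv_keys = mandatory_keys + csv_keys + metadata_keys

csv_output = StringIO(newline="")
# extrasaction="ignore" makes DictWriter skip row keys that are not in
# fieldnames (e.g. "user-agent") instead of raising ValueError; fields
# missing from a packet are emitted as empty cells (restval defaults to "").
fc = csv.DictWriter(csv_output, fieldnames=csv_keys, extrasaction="ignore")
fc.writeheader()
fc.writerows(data)
print(csv_output.getvalue())
# Header: datetime,payload_callsign,lat,lon,alt,batt,temp,snr,time_received,raw

The extrasaction='ignore' argument is what lets the handler drop fields like "user-agent" simply by leaving them out of fieldnames, rather than deleting them from every packet.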


@@ -141,18 +141,18 @@ resource "aws_apigatewayv2_integration" "reverse_predictions" {
 }

 resource "aws_lambda_function" "predictions" {
-  function_name    = "predictions"
-  handler          = "predict.predict"
-  s3_bucket        = aws_s3_bucket_object.lambda.bucket
-  s3_key           = aws_s3_bucket_object.lambda.key
-  reserved_concurrent_executions = 10
-  source_code_hash = data.archive_file.lambda.output_base64sha256
-  publish       = true
-  memory_size   = 128
-  role          = aws_iam_role.basic_lambda_role.arn
-  runtime       = "python3.9"
-  timeout       = 30
-  architectures = ["arm64"]
+  function_name                  = "predictions"
+  handler                        = "predict.predict"
+  s3_bucket                      = aws_s3_bucket_object.lambda.bucket
+  s3_key                         = aws_s3_bucket_object.lambda.key
+  reserved_concurrent_executions = 10
+  source_code_hash               = data.archive_file.lambda.output_base64sha256
+  publish                        = true
+  memory_size                    = 128
+  role                           = aws_iam_role.basic_lambda_role.arn
+  runtime                        = "python3.9"
+  timeout                        = 30
+  architectures                  = ["arm64"]
   environment {
     variables = {
       "ES" = "es.${local.domain_name}"
@@ -171,18 +171,18 @@ resource "aws_lambda_permission" "predictions" {

 resource "aws_lambda_function" "reverse_predictions" {
-  function_name    = "reverse-predictions"
-  handler          = "reverse_predict.predict"
-  s3_bucket        = aws_s3_bucket_object.lambda.bucket
-  s3_key           = aws_s3_bucket_object.lambda.key
-  source_code_hash = data.archive_file.lambda.output_base64sha256
-  publish       = true
-  memory_size   = 128
+  function_name                  = "reverse-predictions"
+  handler                        = "reverse_predict.predict"
+  s3_bucket                      = aws_s3_bucket_object.lambda.bucket
+  s3_key                         = aws_s3_bucket_object.lambda.key
+  source_code_hash               = data.archive_file.lambda.output_base64sha256
+  publish                        = true
+  memory_size                    = 128
   reserved_concurrent_executions = 10
-  role          = aws_iam_role.basic_lambda_role.arn
-  runtime       = "python3.9"
-  timeout       = 30
-  architectures = ["arm64"]
+  role                           = aws_iam_role.basic_lambda_role.arn
+  runtime                        = "python3.9"
+  timeout                        = 30
+  architectures                  = ["arm64"]
   environment {
     variables = {
       "ES" = "es.${local.domain_name}"
@@ -298,16 +298,16 @@ resource "aws_ecs_task_definition" "tawhiri" {
         volumesFrom = []
       },
       {
-        cpu = 0
+        cpu         = 0
         environment = []
-        essential = false
-        image     = "amazon/aws-cli"
-        command = [
+        essential   = false
+        image       = "amazon/aws-cli"
+        command = [
           "s3",
-           "cp",
-           "s3://ruaumoko/ruaumoko-dataset",
-           "/ruaumoko/ruaumoko-dataset"
-        ]
+          "cp",
+          "s3://ruaumoko/ruaumoko-dataset",
+          "/ruaumoko/ruaumoko-dataset"
+        ]
         logConfiguration = {
           logDriver = "awslogs"
           options = {
@@ -401,7 +401,7 @@ resource "aws_ecs_service" "tawhiri" {
   platform_version           = "LATEST"
   desired_count              = 1
   enable_execute_command     = true
-  deployment_maximum_percent  = 400
+  deployment_maximum_percent = 400
   load_balancer {
     container_name = "tawhiri"
     container_port = 8000


@@ -1,17 +1,17 @@
 resource "aws_lambda_function" "get_sondes" {
-  function_name    = "query"
-  handler          = "query.get_sondes"
-  s3_bucket        = aws_s3_bucket_object.lambda.bucket
-  s3_key           = aws_s3_bucket_object.lambda.key
-  source_code_hash = data.archive_file.lambda.output_base64sha256
-  publish       = true
-  memory_size   = 256
+  function_name                  = "query"
+  handler                        = "query.get_sondes"
+  s3_bucket                      = aws_s3_bucket_object.lambda.bucket
+  s3_key                         = aws_s3_bucket_object.lambda.key
+  source_code_hash               = data.archive_file.lambda.output_base64sha256
+  publish                        = true
+  memory_size                    = 256
   reserved_concurrent_executions = 10
-  role          = aws_iam_role.basic_lambda_role.arn
-  runtime       = "python3.9"
-  timeout       = 30
-  architectures = ["arm64"]
+  role                           = aws_iam_role.basic_lambda_role.arn
+  runtime                        = "python3.9"
+  timeout                        = 30
+  architectures                  = ["arm64"]
   environment {
     variables = {
       "ES" = "es.${local.domain_name}"