Mirror of https://github.com/chirpstack/chirpstack.git (synced 2025-02-06 10:49:13 +00:00)
This feature makes it possible to select between PostgreSQL and SQLite as the database backend using a compile-time feature flag. It is not possible to enable both backends at the same time.

Co-authored-by: Momo Bel <plopyomomo@gmail.com>
This commit is contained in:
parent 800d7d0efe
commit e63296573b
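The two backends are mutually exclusive at compile time. As a minimal sketch of how such a constraint is typically expressed in Rust (illustrative only, assuming `postgres` and `sqlite` are declared as Cargo features; this is not the literal code from this commit):

```rust
// Reject the unsupported feature combination and pick one backend module.
// Module contents are placeholders, not ChirpStack's actual storage code.
#[cfg(all(feature = "postgres", feature = "sqlite"))]
compile_error!("the `postgres` and `sqlite` features cannot be enabled at the same time");

#[cfg(feature = "postgres")]
mod backend {
    pub const NAME: &str = "postgres";
}

#[cfg(feature = "sqlite")]
mod backend {
    pub const NAME: &str = "sqlite";
}

#[cfg(any(feature = "postgres", feature = "sqlite"))]
pub fn active_backend() -> &'static str {
    backend::NAME
}
```

With `default = ["postgres"]` in Cargo.toml, building with `--no-default-features --features sqlite` switches the backend, which is exactly how the Makefile targets below pass `--features="$(DATABASE)"`.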
.github/workflows/main.yml (vendored): 9 changed lines

@@ -13,6 +13,13 @@ env:
 jobs:
   tests:
     runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        database:
+          - postgres
+          - sqlite
+    env:
+      DATABASE: ${{ matrix.database }}
     steps:
       -
        name: Checkout
@@ -32,7 +39,7 @@ jobs:
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
            target/
-          key: ${{ runner.os }}-cargo-test-${{ hashFiles('**/Cargo.lock') }}
+          key: ${{ runner.os }}-cargo-test-${{ matrix.database }}-${{ hashFiles('**/Cargo.lock') }}
       -
        name: Start dependency services
        run: docker compose up -d
.gitignore (vendored): 4 changed lines

@@ -11,8 +11,12 @@
 # Binary packages
 /dist
 
+# SQLite databases
+*.sqlite
+
 # Rust target directory
 **/target
+**/target-sqlite
 
 # Certificates
 /chirpstack/configuration/certs/*
Cargo.lock (generated): 24 changed lines

@@ -876,6 +876,7 @@ dependencies = [
  "rustls 0.23.12",
  "rustls-native-certs",
  "rustls-pemfile",
+ "scoped-futures",
  "serde",
  "serde_json",
  "serde_urlencoded",
@@ -907,7 +908,6 @@ dependencies = [
 name = "chirpstack_api"
 version = "4.9.0"
 dependencies = [
- "diesel",
  "hex",
  "pbjson",
  "pbjson-build",
@@ -1303,10 +1303,12 @@ dependencies = [
  "chrono",
  "diesel_derives",
  "itoa",
+ "libsqlite3-sys",
  "num-bigint",
  "num-integer",
  "num-traits",
  "serde_json",
+ "time",
  "uuid",
 ]
 
@@ -2165,9 +2167,9 @@ dependencies = [
 
 [[package]]
 name = "hyper-util"
-version = "0.1.6"
+version = "0.1.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3ab92f4f49ee4fb4f997c784b7a2e0fa70050211e0b6a287f898c3c9785ca956"
+checksum = "cde7055719c54e36e95e8719f95883f22072a48ede39db7fc17a4e1d5281e9b9"
 dependencies = [
  "bytes",
  "futures-channel",
@@ -2466,6 +2468,16 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "libsqlite3-sys"
+version = "0.29.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4588d65215825ee71ebff9e1c9982067833b1355d7546845ffdb3165cbd7456"
+dependencies = [
+ "pkg-config",
+ "vcpkg",
+]
+
 [[package]]
 name = "linux-raw-sys"
 version = "0.3.8"
@@ -5019,6 +5031,12 @@ version = "1.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5a84c137d37ab0142f0f2ddfe332651fdbf252e7b7dbb4e67b6c1f1b2e925101"
 
+[[package]]
+name = "vcpkg"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
+
 [[package]]
 name = "version_check"
 version = "0.9.5"
Makefile: 2 changed lines

@@ -8,7 +8,7 @@ dist:
 # Install dev dependencies
 dev-dependencies:
 	cargo install cross --version 0.2.5
-	cargo install diesel_cli --version 2.2.1 --no-default-features --features postgres
+	cargo install diesel_cli --version 2.2.1 --no-default-features --features postgres,sqlite
 	cargo install cargo-deb --version 1.43.1
 	cargo install cargo-generate-rpm --version 0.12.1
README.md: 32 changed lines

@@ -84,7 +84,11 @@ docker compose up -d
 Run the following command to run the ChirpStack tests:
 
 ```bash
+# Test (with PostgreSQL database backend)
 make test
+
+# Test with SQLite database backend
+DATABASE=sqlite make test
 ```
 
 ### Building ChirpStack binaries
@@ -109,6 +113,34 @@ make release-amd64
 make dist
 ```
 
+By default the above commands will build ChirpStack with the PostgreSQL
+database backend. Set the `DATABASE=sqlite` env. variable to compile ChirpStack
+with the SQLite database backend.
+
+### Database migrations
+
+To create a new database migration, execute:
+
+```
+make migration-generate NAME=test-migration
+```
+
+To apply migrations, execute:
+
+```
+make migration-run
+```
+
+To revert a migration, execute:
+
+```
+make migration-revert
+```
+
+By default the above commands will execute the migration commands using the
+PostgreSQL database backend. To execute migration commands for the SQLite
+database backend, set the `DATABASE=sqlite` env. variable.
+
 ## License
 
 ChirpStack Network Server is distributed under the MIT license. See also
api/rust/Cargo.toml (vendored): 2 changed lines

@@ -12,7 +12,6 @@
 default = ["api", "json"]
 api = ["tonic/transport", "tonic-build/transport", "tokio"]
 json = ["pbjson", "pbjson-types", "serde"]
-diesel = ["dep:diesel"]
 internal = []
 
 [dependencies]
@@ -29,7 +28,6 @@
 pbjson = { version = "0.7", optional = true }
 pbjson-types = { version = "0.7", optional = true }
 serde = { version = "1.0", optional = true }
-diesel = { version = "2.2", features = ["postgres_backend"], optional = true }
 
 [build-dependencies]
 tonic-build = { version = "0.12", features = [
api/rust/src/internal.rs (vendored): 32 changed lines

@@ -2,13 +2,6 @@ include!(concat!(env!("OUT_DIR"), "/internal/internal.rs"));
 #[cfg(feature = "json")]
 include!(concat!(env!("OUT_DIR"), "/internal/internal.serde.rs"));
 
-#[cfg(feature = "diesel")]
-use diesel::{backend::Backend, deserialize, serialize, sql_types::Binary};
-#[cfg(feature = "diesel")]
-use prost::Message;
-#[cfg(feature = "diesel")]
-use std::io::Cursor;
-
 impl DeviceSession {
     pub fn get_a_f_cnt_down(&self) -> u32 {
         if self.mac_version().to_string().starts_with("1.0") {
@@ -30,28 +23,3 @@ impl DeviceSession {
         }
     }
 }
-
-#[cfg(feature = "diesel")]
-impl<ST, DB> deserialize::FromSql<ST, DB> for DeviceSession
-where
-    DB: Backend,
-    *const [u8]: deserialize::FromSql<ST, DB>,
-{
-    fn from_sql(value: DB::RawValue<'_>) -> deserialize::Result<Self> {
-        let bytes = <Vec<u8> as deserialize::FromSql<ST, DB>>::from_sql(value)?;
-        Ok(DeviceSession::decode(&mut Cursor::new(bytes))?)
-    }
-}
-
-#[cfg(feature = "diesel")]
-impl serialize::ToSql<Binary, diesel::pg::Pg> for DeviceSession
-where
-    [u8]: serialize::ToSql<Binary, diesel::pg::Pg>,
-{
-    fn to_sql(&self, out: &mut serialize::Output<'_, '_, diesel::pg::Pg>) -> serialize::Result {
-        <[u8] as serialize::ToSql<Binary, diesel::pg::Pg>>::to_sql(
-            &self.encode_to_vec(),
-            &mut out.reborrow(),
-        )
-    }
-}
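Because the `diesel` feature is dropped from chirpstack_api, the Pg-specific `ToSql` impl removed above has to be replaced by backend-aware code elsewhere. A rough sketch of what a SQLite-side equivalent can look like on a local wrapper type (illustrative only; it assumes diesel with its `sqlite` feature plus prost, and is not the code this commit adds):

```rust
use diesel::serialize::{self, IsNull, Output, ToSql};
use diesel::sql_types::Binary;
use diesel::sqlite::Sqlite;
use prost::Message;

// Local newtype so the diesel trait can be implemented outside chirpstack_api
// (the inner type path matches the generated protobuf message).
#[derive(Debug)]
pub struct DeviceSessionBlob(pub chirpstack_api::internal::DeviceSession);

impl ToSql<Binary, Sqlite> for DeviceSessionBlob {
    fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Sqlite>) -> serialize::Result {
        // SQLite bind values are owned, so the prost-encoded bytes are handed over directly.
        out.set_value(self.0.encode_to_vec());
        Ok(IsNull::No)
    }
}

// The FromSql direction is analogous: read the raw bytes and decode them with prost.
```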
chirpstack/Cargo.toml

@@ -26,20 +26,16 @@
 email_address = "0.2"
 diesel = { version = "2.2", features = [
     "chrono",
-    "uuid",
-    "serde_json",
     "numeric",
     "64-column-tables",
-    "postgres_backend",
 ] }
 diesel_migrations = { version = "2.2" }
 diesel-async = { version = "0.5", features = [
     "deadpool",
-    "postgres",
     "async-connection-wrapper",
 ] }
-tokio-postgres = "0.7"
-tokio-postgres-rustls = "0.12"
+tokio-postgres = { version = "0.7", optional = true }
+tokio-postgres-rustls = { version = "0.12", optional = true }
 bigdecimal = "0.4"
 redis = { version = "0.26", features = ["tls-rustls", "tokio-rustls-comp"] }
 deadpool-redis = { version = "0.16", features = ["cluster"] }
@@ -53,11 +49,7 @@
 ], default-features = true }
 
 # ChirpStack API definitions
-chirpstack_api = { path = "../api/rust", features = [
-    "default",
-    "internal",
-    "diesel",
-] }
+chirpstack_api = { path = "../api/rust", features = ["default", "internal"] }
 lrwn = { path = "../lrwn", features = [
     "serde",
     "diesel",
@@ -161,6 +153,7 @@
 petgraph = "0.6"
 prometheus-client = "0.22"
 pin-project = "1.1"
+scoped-futures = { version = "0.1", features = ["std"] }
 signal-hook = "0.3"
 signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] }
 
@@ -171,6 +164,23 @@
 dotenv = "0.15"
 
 [features]
+default = ["postgres"]
+postgres = [
+    "tokio-postgres",
+    "tokio-postgres-rustls",
+    "diesel/postgres_backend",
+    "diesel/serde_json",
+    "diesel/uuid",
+    "diesel-async/postgres",
+    "lrwn/postgres",
+]
+sqlite = [
+    "diesel/sqlite",
+    "diesel/returning_clauses_for_sqlite_3_35",
+    "lrwn/sqlite",
+    "diesel-async/sync-connection-wrapper",
+    "diesel-async/sqlite",
+]
 test-all-integrations = [
     "test-integration-amqp",
     "test-integration-kafka",
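The `postgres` and `sqlite` features above mostly forward to the matching diesel and diesel-async features, so the rest of the crate can pick a backend-specific connection type behind `cfg`. A minimal sketch of that pattern, assuming exactly one of the two features is enabled (the alias name is illustrative, not an identifier from this commit):

```rust
// PostgreSQL has a native async driver in diesel-async.
#[cfg(feature = "postgres")]
pub type AsyncDbConnection = diesel_async::AsyncPgConnection;

// SQLite has no async driver; the `sync-connection-wrapper` feature wraps the
// blocking diesel connection so it can be used through the same async API.
#[cfg(feature = "sqlite")]
pub type AsyncDbConnection =
    diesel_async::sync_connection_wrapper::SyncConnectionWrapper<diesel::SqliteConnection>;
```

This is why `diesel-async/sync-connection-wrapper` and `diesel-async/sqlite` appear in the `sqlite` feature list.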
chirpstack/Makefile

@@ -1,20 +1,21 @@
 .PHONY: dist
 
 PKG_VERSION := $(shell cargo metadata --no-deps --format-version 1 | jq -r '.packages[0].version')
+DATABASE ?= postgres
 
 debug-amd64:
-	cross build --target x86_64-unknown-linux-musl
+	cross build --target x86_64-unknown-linux-musl --no-default-features --features="$(DATABASE)"
 
 release-amd64:
-	cross build --target x86_64-unknown-linux-musl --release
+	cross build --target x86_64-unknown-linux-musl --release --no-default-features --features="$(DATABASE)"
 
 dist:
 	# Keep these in this order, as aarch64 is based on Debian Buster (older),
 	# the others on Bullseye. For some build scripts we want to build against
 	# least recent LIBC.
-	cross build --target aarch64-unknown-linux-musl --release
-	cross build --target x86_64-unknown-linux-musl --release
-	cross build --target armv7-unknown-linux-musleabihf --release
+	cross build --target aarch64-unknown-linux-musl --release --no-default-features --features="$(DATABASE)"
+	cross build --target x86_64-unknown-linux-musl --release --no-default-features --features="$(DATABASE)"
+	cross build --target armv7-unknown-linux-musleabihf --release --no-default-features --features="$(DATABASE)"
 
 	cargo deb --target x86_64-unknown-linux-musl --no-build --no-strip
 	cargo deb --target armv7-unknown-linux-musleabihf --no-build --no-strip
@@ -40,10 +41,38 @@ dist:
 
 test:
 	cargo fmt --check
-	cargo clippy --no-deps
-	TZ=UTC cargo test
+	cargo clippy --no-deps --no-default-features --features="$(DATABASE)"
+	TZ=UTC cargo test --no-default-features --features="$(DATABASE)"
 
 test-all:
 	cargo fmt --check
-	cargo clippy --no-deps
-	TZ=UTC cargo test --features test-all-integrations
+	cargo clippy --no-deps --no-default-features --features="$(DATABASE)"
+	TZ=UTC cargo test --no-default-features --features="$(DATABASE),test-all-integrations"
+
+migration-generate:
+ifeq ($(NAME),)
+	@echo "You must provide a NAME parameter, e.g. make migration-generate NAME=test-migration"
+else
+	diesel --config-file diesel_$(DATABASE).toml migration --migration-dir migrations_$(DATABASE) generate $(NAME)
+endif
+
+migration-run: chirpstack_test.sqlite
+ifeq ($(DATABASE),postgres)
+	diesel --config-file diesel_postgres.toml migration --migration-dir migrations_postgres run
+endif
+ifeq ($(DATABASE),sqlite)
+	DATABASE_URL="chirpstack_test.sqlite" diesel --config-file diesel_sqlite.toml migration --migration-dir migrations_sqlite run
+	sed -i 's/Timestamp/TimestamptzSqlite/g' src/storage/schema_sqlite.rs
+endif
+
+migration-revert: chirpstack_test.sqlite
+ifeq ($(DATABASE),postgres)
+	diesel --config-file diesel_postgres.toml migration --migration-dir migrations_postgres revert
+endif
+ifeq ($(DATABASE),sqlite)
+	DATABASE_URL="chirpstack_test.sqlite" diesel --config-file diesel_sqlite.toml migration --migration-dir migrations_sqlite revert
+	sed -i 's/Timestamp/TimestamptzSqlite/g' src/storage/schema_sqlite.rs
+endif
+
+chirpstack_test.sqlite:
+	DATABASE_URL=chirpstack_test.sqlite diesel --config-file diesel_sqlite.toml setup --migration-dir migrations_sqlite
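The `sed` step above rewrites the `Timestamp` column types that diesel's schema generation emits for `src/storage/schema_sqlite.rs` into a `TimestamptzSqlite` SQL type, so timezone-aware timestamps can be mapped onto SQLite's text-based datetime columns. A rough sketch of what such a rewritten table definition can look like, assuming a custom SQL type of that name (illustrative only, not the generated file from this commit):

```rust
use diesel::sql_types::SqlType;

// Stand-in custom SQL type for timezone-aware timestamps stored as SQLite text;
// ChirpStack defines its own equivalent in its storage layer.
#[derive(SqlType)]
#[diesel(sqlite_type(name = "Text"))]
pub struct TimestamptzSqlite;

diesel::table! {
    use diesel::sql_types::*;
    use super::TimestamptzSqlite;

    // Mirrors the api_key table from the SQLite migration below.
    api_key (id) {
        id -> Text,
        created_at -> TimestamptzSqlite,
        name -> Text,
        is_admin -> Bool,
        tenant_id -> Nullable<Text>,
    }
}
```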
chirpstack/diesel_postgres.toml

@@ -2,4 +2,4 @@
 # see diesel.rs/guides/configuring-diesel-cli
 
 [print_schema]
-file = "src/storage/schema.rs"
+file = "src/storage/schema_postgres.rs"
chirpstack/diesel_sqlite.toml (new file): 5 lines

@@ -0,0 +1,5 @@
# For documentation on how to configure this file,
# see diesel.rs/guides/configuring-diesel-cli

[print_schema]
file = "src/storage/schema_sqlite.rs"
chirpstack/migrations_sqlite/00000000000000_initial/down.sql (new file): 18 lines

@@ -0,0 +1,18 @@
drop table relay_gateway;
drop table multicast_group_gateway;
drop table multicast_group_queue_item;
drop table multicast_group_device;
drop table multicast_group;
drop table device_queue_item;
drop table device_keys;
drop table device;
drop table device_profile;
drop table api_key;
drop table application_integration;
drop table application;
drop table gateway;
drop table tenant_user;
drop table tenant;
drop table "user";
drop table relay_device;
drop table device_profile_template;
chirpstack/migrations_sqlite/00000000000000_initial/up.sql (new file): 392 lines

@@ -0,0 +1,392 @@
-- user
create table "user" (
    id text not null primary key,
    external_id text null,
    created_at datetime not null,
    updated_at datetime not null,
    is_admin boolean not null,
    is_active boolean not null,
    email text not null,
    email_verified boolean not null,
    password_hash varchar(200) not null,
    note text not null
);

create unique index idx_user_email on "user"(email);
create unique index idx_user_external_id on "user"(external_id);

insert into "user" (
    id,
    created_at,
    updated_at,
    is_admin,
    is_active,
    email,
    email_verified,
    password_hash,
    note
) values (
    '05244f12-6daf-4e1f-8315-c66783a0ab56',
    datetime('now'),
    datetime('now'),
    TRUE,
    TRUE,
    'admin',
    FALSE,
    '$pbkdf2-sha512$i=1,l=64$l8zGKtxRESq3PA2kFhHRWA$H3lGMxOt55wjwoc+myeOoABofJY9oDpldJa7fhqdjbh700V6FLPML75UmBOt9J5VFNjAL1AvqCozA1HJM0QVGA',
    ''
);

-- tenant
create table tenant (
    id text not null primary key,
    created_at datetime not null,
    updated_at datetime not null,
    name varchar(100) not null,
    description text not null,
    can_have_gateways boolean not null,
    max_device_count integer not null,
    max_gateway_count integer not null,
    private_gateways_up boolean not null,
    private_gateways_down boolean not null default FALSE,
    tags text not null default '{}'
);

-- sqlite has advanced text search with https://www.sqlite.org/fts5.html
-- but looks like it is for a full table and not specific per column, to investigate
create index idx_tenant_name_trgm on "tenant"(name);

insert into "tenant" (
    id,
    created_at,
    updated_at,
    name,
    description,
    can_have_gateways,
    max_device_count,
    max_gateway_count,
    private_gateways_up
) values (
    '52f14cd4-c6f1-4fbd-8f87-4025e1d49242',
    datetime('now'),
    datetime('now'),
    'ChirpStack',
    '',
    TRUE,
    0,
    0,
    FALSE
);

-- tenant user
create table tenant_user (
    tenant_id text not null references tenant on delete cascade,
    user_id text not null references "user" on delete cascade,
    created_at datetime not null,
    updated_at datetime not null,
    is_admin boolean not null,
    is_device_admin boolean not null,
    is_gateway_admin boolean not null,
    primary key (tenant_id, user_id)
);

create index idx_tenant_user_user_id on tenant_user (user_id);

-- gateway
create table gateway (
    gateway_id blob not null primary key,
    tenant_id text not null references tenant on delete cascade,
    created_at datetime not null,
    updated_at datetime not null,
    last_seen_at datetime,
    name varchar(100) not null,
    description text not null,
    latitude double precision not null,
    longitude double precision not null,
    altitude real not null,
    stats_interval_secs integer not null,
    tls_certificate blob,
    tags text not null,
    properties text not null
);

create index idx_gateway_tenant_id on gateway (tenant_id);
create index idx_gateway_name_trgm on gateway (name);
create index idx_gateway_id_trgm on gateway (hex(gateway_id));
create index idx_gateway_tags on gateway (tags);

-- application
create table application (
    id text not null primary key,
    tenant_id text not null references tenant on delete cascade,
    created_at datetime not null,
    updated_at datetime not null,
    name varchar(100) not null,
    description text not null,
    mqtt_tls_cert blob,
    tags text not null default '{}'
);

create index idx_application_tenant_id on application (tenant_id);
create index idx_application_name_trgm on application (name);

-- application integration
create table application_integration (
    application_id text not null references application on delete cascade,
    kind varchar(20) not null,
    created_at datetime not null,
    updated_at datetime not null,
    configuration text not null,

    primary key (application_id, kind)
);

-- api-key
create table api_key (
    id text not null primary key,
    created_at datetime not null,
    name varchar(100) not null,
    is_admin boolean not null,
    tenant_id text null references tenant on delete cascade
);

create index idx_api_key_tenant_id on api_key (tenant_id);

-- device-profile
create table device_profile (
    id text not null primary key,
    tenant_id text not null references tenant on delete cascade,
    created_at datetime not null,
    updated_at datetime not null,
    name varchar(100) not null,
    region varchar(10) not null,
    mac_version varchar(10) not null,
    reg_params_revision varchar(20) not null,
    adr_algorithm_id varchar(100) not null,
    payload_codec_runtime varchar(20) not null,
    uplink_interval integer not null,
    device_status_req_interval integer not null,
    supports_otaa boolean not null,
    supports_class_b boolean not null,
    supports_class_c boolean not null,
    class_b_timeout integer not null,
    class_b_ping_slot_nb_k integer not null,
    class_b_ping_slot_dr smallint not null,
    class_b_ping_slot_freq bigint not null,
    class_c_timeout integer not null,
    abp_rx1_delay smallint not null,
    abp_rx1_dr_offset smallint not null,
    abp_rx2_dr smallint not null,
    abp_rx2_freq bigint not null,
    tags text not null,
    payload_codec_script text not null default '',
    flush_queue_on_activate boolean not null default FALSE,
    description text not null default '',
    measurements text not null default '{}',
    auto_detect_measurements boolean not null default TRUE,
    region_config_id varchar(100) null,
    is_relay boolean not null default FALSE,
    is_relay_ed boolean not null default FALSE,
    relay_ed_relay_only boolean not null default FALSE,
    relay_enabled boolean not null default FALSE,
    relay_cad_periodicity smallint not null default 0,
    relay_default_channel_index smallint not null default 0,
    relay_second_channel_freq bigint not null default 0,
    relay_second_channel_dr smallint not null default 0,
    relay_second_channel_ack_offset smallint not null default 0,
    relay_ed_activation_mode smallint not null default 0,
    relay_ed_smart_enable_level smallint not null default 0,
    relay_ed_back_off smallint not null default 0,
    relay_ed_uplink_limit_bucket_size smallint not null default 0,
    relay_ed_uplink_limit_reload_rate smallint not null default 0,
    relay_join_req_limit_reload_rate smallint not null default 0,
    relay_notify_limit_reload_rate smallint not null default 0,
    relay_global_uplink_limit_reload_rate smallint not null default 0,
    relay_overall_limit_reload_rate smallint not null default 0,
    relay_join_req_limit_bucket_size smallint not null default 0,
    relay_notify_limit_bucket_size smallint not null default 0,
    relay_global_uplink_limit_bucket_size smallint not null default 0,
    relay_overall_limit_bucket_size smallint not null default 0,
    allow_roaming boolean not null default TRUE,
    rx1_delay smallint not null default 0
);

create index idx_device_profile_tenant_id on device_profile (tenant_id);
create index idx_device_profile_name_trgm on device_profile (name);
create index idx_device_profile_tags on device_profile (tags);

-- device
create table device (
    dev_eui blob not null primary key,
    application_id text not null references application on delete cascade,
    device_profile_id text not null references device_profile on delete cascade,
    created_at datetime not null,
    updated_at datetime not null,
    last_seen_at datetime,
    scheduler_run_after datetime,
    name varchar(100) not null,
    description text not null,
    external_power_source boolean not null,
    battery_level numeric(5, 2),
    margin int,
    dr smallint,
    latitude double precision,
    longitude double precision,
    altitude real,
    dev_addr blob,
    enabled_class char(1) not null,
    skip_fcnt_check boolean not null,
    is_disabled boolean not null,
    tags text not null,
    variables text not null,
    join_eui blob not null default x'00000000',
    secondary_dev_addr blob,
    device_session blob
);

create index idx_device_application_id on device (application_id);
create index idx_device_device_profile_id on device (device_profile_id);
create index idx_device_name_trgm on device (name);
create index idx_device_dev_eui_trgm on device (hex(dev_eui));
create index idx_device_dev_addr_trgm on device (hex(dev_addr));
create index idx_device_tags on device (tags);

create table device_keys (
    dev_eui blob not null primary key references device on delete cascade,
    created_at datetime not null,
    updated_at datetime not null,
    nwk_key blob not null,
    app_key blob not null,
    dev_nonces text not null,
    join_nonce int not null
);

create table device_queue_item (
    id text not null primary key,
    dev_eui blob references device on delete cascade not null,
    created_at datetime not null,
    f_port smallint not null,
    confirmed boolean not null,
    data blob not null,
    is_pending boolean not null,
    f_cnt_down bigint null,
    timeout_after datetime,
    is_encrypted boolean default FALSE not null
);

create index idx_device_queue_item_dev_eui on device_queue_item (dev_eui);
create index idx_device_queue_item_created_at on device_queue_item (created_at);
create index idx_device_queue_item_timeout_after on device_queue_item (timeout_after);

-- multicast groups
create table multicast_group (
    id text not null primary key,
    application_id text not null references application on delete cascade,
    created_at datetime not null,
    updated_at datetime not null,
    name varchar(100) not null,
    region varchar(10) not null,
    mc_addr blob not null,
    mc_nwk_s_key blob not null,
    mc_app_s_key blob not null,
    f_cnt bigint not null,
    group_type char(1) not null,
    dr smallint not null,
    frequency bigint not null,
    class_b_ping_slot_nb_k smallint not null,
    class_c_scheduling_type varchar(20) not null default 'delay'
);

create index idx_multicast_group_application_id on multicast_group (application_id);
create index idx_multicast_group_name_trgm on multicast_group (name);

create table multicast_group_device (
    multicast_group_id text not null references multicast_group on delete cascade,
    dev_eui blob not null references device on delete cascade,
    created_at datetime not null,
    primary key (multicast_group_id, dev_eui)
);

create table multicast_group_queue_item (
    id text not null primary key,
    created_at datetime not null,
    scheduler_run_after datetime not null,
    multicast_group_id text not null references multicast_group on delete cascade,
    gateway_id blob not null references gateway on delete cascade,
    f_cnt bigint not null,
    f_port smallint not null,
    data blob not null,
    emit_at_time_since_gps_epoch bigint
);

create index idx_multicast_group_queue_item_multicast_group_id on multicast_group_queue_item (multicast_group_id);
create index idx_multicast_group_queue_item_scheduler_run_after on multicast_group_queue_item (scheduler_run_after);

create table device_profile_template (
    id text not null primary key,
    created_at datetime not null,
    updated_at datetime not null,
    name varchar(100) not null,
    description text not null,
    vendor varchar(100) not null,
    firmware varchar(100) not null,
    region varchar(10) not null,
    mac_version varchar(10) not null,
    reg_params_revision varchar(20) not null,
    adr_algorithm_id varchar(100) not null,
    payload_codec_runtime varchar(20) not null,
    payload_codec_script text not null,
    uplink_interval integer not null,
    device_status_req_interval integer not null,
    flush_queue_on_activate boolean not null,
    supports_otaa boolean not null,
    supports_class_b boolean not null,
    supports_class_c boolean not null,
    class_b_timeout integer not null,
    class_b_ping_slot_nb_k integer not null,
    class_b_ping_slot_dr smallint not null,
    class_b_ping_slot_freq bigint not null,
    class_c_timeout integer not null,
    abp_rx1_delay smallint not null,
    abp_rx1_dr_offset smallint not null,
    abp_rx2_dr smallint not null,
    abp_rx2_freq bigint not null,
    tags text not null,
    measurements text not null default '{}',
    auto_detect_measurements boolean not null default TRUE
);

create table multicast_group_gateway (
    multicast_group_id text not null references multicast_group on delete cascade,
    gateway_id blob not null references gateway on delete cascade,
    created_at datetime not null,
    primary key (multicast_group_id, gateway_id)
);

create table relay_device (
    relay_dev_eui blob not null references device on delete cascade,
    dev_eui blob not null references device on delete cascade,
    created_at datetime not null,
    primary key (relay_dev_eui, dev_eui)
);

create index idx_tenant_tags on tenant (tags);
create index idx_application_tags on application (tags);
create index idx_device_dev_addr on device (dev_addr);
create index idx_device_secondary_dev_addr on device (secondary_dev_addr);


-- relay gateway
create table relay_gateway (
    tenant_id text not null references tenant on delete cascade,
    relay_id blob not null,
    created_at datetime not null,
    updated_at datetime not null,
    last_seen_at datetime,
    name varchar(100) not null,
    description text not null,
    stats_interval_secs integer not null,
    region_config_id varchar(100) not null,

    primary key (tenant_id, relay_id)
);
chirpstack/src/api/application.rs

@@ -44,7 +44,7 @@ impl ApplicationService for Application {
             .await?;
 
         let a = application::Application {
-            tenant_id,
+            tenant_id: tenant_id.into(),
             name: req_app.name.clone(),
             description: req_app.description.clone(),
             tags: fields::KeyValue::new(req_app.tags.clone()),
@@ -119,7 +119,7 @@ impl ApplicationService for Application {
             .await?;
 
         let _ = application::update(application::Application {
-            id: app_id,
+            id: app_id.into(),
             name: req_app.name.to_string(),
             description: req_app.description.to_string(),
             tags: fields::KeyValue::new(req_app.tags.clone()),
@@ -279,7 +279,7 @@ impl ApplicationService for Application {
             .await?;
 
         let i = application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
             kind: application::IntegrationKind::Http,
             configuration: application::IntegrationConfiguration::Http(
                 application::HttpConfiguration {
@@ -367,7 +367,7 @@ impl ApplicationService for Application {
             .await?;
 
         let _ = application::update_integration(application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
             kind: application::IntegrationKind::Http,
             configuration: application::IntegrationConfiguration::Http(
                 application::HttpConfiguration {
@@ -438,7 +438,7 @@ impl ApplicationService for Application {
             .await?;
 
         let i = application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
            kind: application::IntegrationKind::InfluxDb,
            configuration: application::IntegrationConfiguration::InfluxDb(
                application::InfluxDbConfiguration {
@@ -535,7 +535,7 @@ impl ApplicationService for Application {
             .await?;
 
         let _ = application::update_integration(application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
            kind: application::IntegrationKind::InfluxDb,
            configuration: application::IntegrationConfiguration::InfluxDb(
                application::InfluxDbConfiguration {
@@ -610,7 +610,7 @@ impl ApplicationService for Application {
             .await?;
 
         let i = application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
            kind: application::IntegrationKind::ThingsBoard,
            configuration: application::IntegrationConfiguration::ThingsBoard(
                application::ThingsBoardConfiguration {
@@ -689,7 +689,7 @@ impl ApplicationService for Application {
             .await?;
 
         let _ = application::update_integration(application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
            kind: application::IntegrationKind::ThingsBoard,
            configuration: application::IntegrationConfiguration::ThingsBoard(
                application::ThingsBoardConfiguration {
@@ -755,7 +755,7 @@ impl ApplicationService for Application {
             .await?;
 
         let _ = application::create_integration(application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
            kind: application::IntegrationKind::MyDevices,
            configuration: application::IntegrationConfiguration::MyDevices(
                application::MyDevicesConfiguration {
@@ -832,7 +832,7 @@ impl ApplicationService for Application {
             .await?;
 
         let _ = application::update_integration(application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
            kind: application::IntegrationKind::MyDevices,
            configuration: application::IntegrationConfiguration::MyDevices(
                application::MyDevicesConfiguration {
@@ -907,7 +907,7 @@ impl ApplicationService for Application {
         };
 
         let _ = application::create_integration(application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
            kind: application::IntegrationKind::LoraCloud,
            configuration: application::IntegrationConfiguration::LoraCloud(
                application::LoraCloudConfiguration {
@@ -1032,7 +1032,7 @@ impl ApplicationService for Application {
         };
 
         let _ = application::update_integration(application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
            kind: application::IntegrationKind::LoraCloud,
            configuration: application::IntegrationConfiguration::LoraCloud(
                application::LoraCloudConfiguration {
@@ -1119,7 +1119,7 @@ impl ApplicationService for Application {
             .await?;
 
         let _ = application::create_integration(application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
            kind: application::IntegrationKind::GcpPubSub,
            configuration: application::IntegrationConfiguration::GcpPubSub(
                application::GcpPubSubConfiguration {
@@ -1202,7 +1202,7 @@ impl ApplicationService for Application {
             .await?;
 
         let _ = application::update_integration(application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
            kind: application::IntegrationKind::GcpPubSub,
            configuration: application::IntegrationConfiguration::GcpPubSub(
                application::GcpPubSubConfiguration {
@@ -1271,7 +1271,7 @@ impl ApplicationService for Application {
             .await?;
 
         let _ = application::create_integration(application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
            kind: application::IntegrationKind::AwsSns,
            configuration: application::IntegrationConfiguration::AwsSns(
                application::AwsSnsConfiguration {
@@ -1354,7 +1354,7 @@ impl ApplicationService for Application {
             .await?;
 
         let _ = application::update_integration(application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
            kind: application::IntegrationKind::AwsSns,
            configuration: application::IntegrationConfiguration::AwsSns(
                application::AwsSnsConfiguration {
@@ -1424,7 +1424,7 @@ impl ApplicationService for Application {
             .await?;
 
         let _ = application::create_integration(application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
            kind: application::IntegrationKind::AzureServiceBus,
            configuration: application::IntegrationConfiguration::AzureServiceBus(
                application::AzureServiceBusConfiguration {
@@ -1506,7 +1506,7 @@ impl ApplicationService for Application {
             .await?;
 
         let _ = application::update_integration(application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
            kind: application::IntegrationKind::AzureServiceBus,
            configuration: application::IntegrationConfiguration::AzureServiceBus(
                application::AzureServiceBusConfiguration {
@@ -1574,7 +1574,7 @@ impl ApplicationService for Application {
             .await?;
 
         let _ = application::create_integration(application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
            kind: application::IntegrationKind::PilotThings,
            configuration: application::IntegrationConfiguration::PilotThings(
                application::PilotThingsConfiguration {
@@ -1653,7 +1653,7 @@ impl ApplicationService for Application {
             .await?;
 
         let _ = application::update_integration(application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
            kind: application::IntegrationKind::PilotThings,
            configuration: application::IntegrationConfiguration::PilotThings(
                application::PilotThingsConfiguration {
@@ -1730,7 +1730,7 @@ impl ApplicationService for Application {
         }
 
         let _ = application::create_integration(application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
            kind: application::IntegrationKind::Ifttt,
            configuration: application::IntegrationConfiguration::Ifttt(
                application::IftttConfiguration {
@@ -1814,7 +1814,7 @@ impl ApplicationService for Application {
             .await?;
 
         let _ = application::update_integration(application::Integration {
-            application_id: app_id,
+            application_id: app_id.into(),
            kind: application::IntegrationKind::Ifttt,
            configuration: application::IntegrationConfiguration::Ifttt(
                application::IftttConfiguration {
@@ -1945,7 +1945,9 @@ pub mod test {
             }),
         };
         let mut create_req = Request::new(create_req);
-        create_req.extensions_mut().insert(AuthID::User(u.id));
+        create_req
+            .extensions_mut()
+            .insert(AuthID::User(Into::<uuid::Uuid>::into(u.id).clone()));
         let create_resp = service.create(create_req).await.unwrap();
         let create_resp = create_resp.get_ref();
 
@@ -1954,7 +1956,9 @@ pub mod test {
             id: create_resp.id.clone(),
         };
         let mut get_req = Request::new(get_req);
-        get_req.extensions_mut().insert(AuthID::User(u.id));
+        get_req
+            .extensions_mut()
+            .insert(AuthID::User(Into::<uuid::Uuid>::into(u.id).clone()));
         let get_resp = service.get(get_req).await.unwrap();
         assert_eq!(
             Some(api::Application {
@@ -1976,7 +1980,9 @@ pub mod test {
             }),
         };
         let mut up_req = Request::new(up_req);
-        up_req.extensions_mut().insert(AuthID::User(u.id));
+        up_req
+            .extensions_mut()
+            .insert(AuthID::User(Into::<uuid::Uuid>::into(u.id).clone()));
         let _ = service.update(up_req).await.unwrap();
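The repeated `.into()` calls in this file point at the underlying change: the storage structs no longer take a plain `uuid::Uuid` but a wrapper type that converts from and to it, which lets the storage layer implement Diesel traits for UUIDs once per backend (native UUID on PostgreSQL, text on SQLite). A minimal sketch of that pattern with an assumed name (`StorageUuid` is illustrative, not the identifier used in this commit):

```rust
use uuid::Uuid;

// Newtype wrapper owned by the storage layer.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct StorageUuid(Uuid);

impl From<Uuid> for StorageUuid {
    fn from(u: Uuid) -> Self {
        StorageUuid(u)
    }
}

impl From<StorageUuid> for Uuid {
    fn from(u: StorageUuid) -> Self {
        u.0
    }
}

// API handlers convert at the boundary, as in the hunks above:
//     let a = application::Application { tenant_id: tenant_id.into(), ... };
// and back to a plain uuid::Uuid where one is required:
//     AuthID::User(Into::<uuid::Uuid>::into(u.id))
```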