Mirror of https://github.com/chirpstack/chirpstack.git
Synced 2025-06-23 01:18:54 +00:00
Update dependencies + fix clippy warnings.
@@ -11,28 +11,28 @@ license = "MIT"
 
 [dependencies]
 # CLI interface
-clap = "2.33"
+clap = "3.2"
 
 # Configuration
 serde = { version = "1.0", features = ["derive", "rc"] }
-serde_yaml = "0.8"
+serde_yaml = "0.9"
 serde_json = "1.0"
-humantime-serde = "1.0"
+humantime-serde = "1.1"
 toml = "0.5"
-handlebars = "4.1"
+handlebars = "4.3"
 
 # Database
-validator = "0.13"
-diesel = { version = "2.0.0-rc.1", features = [ "chrono", "postgres", "r2d2", "uuid", "serde_json", "numeric" ] }
-diesel_migrations = { version = "2.0.0-rc.1" }
+validator = "0.16"
+diesel = { version = "2.0.0", features = [ "chrono", "postgres", "r2d2", "uuid", "serde_json", "numeric" ] }
+diesel_migrations = { version = "2.0.0" }
 r2d2 = "0.8"
 bigdecimal = "0.3"
 redis = { version = "0.21", features = ["r2d2", "cluster"] }
-pq-sys = { version = "0.4.6", features = ["pkg-config"] }
+pq-sys = { version = "0.4.7", features = ["pkg-config"] }
 
 # Logging
 tracing = "0.1"
-tracing-subscriber = { version = "0.2", features = ["fmt", "ansi"], default-features = true }
+tracing-subscriber = { version = "0.3", features = ["fmt", "ansi"], default-features = true }
 
 # ChirpStack API definitions
 chirpstack_api = { path = "../api/rust", features = ["default", "internal"] }
@@ -43,28 +43,28 @@ backend = { path = "../backend" }
 reqwest = { version = "0.11", features = ["json"] }
 
 # Integrations
-aws-types = "0.3.0"
-aws-config = "0.3.0"
-aws-sdk-sns = "0.3.0"
+aws-types = "0.49"
+aws-config = "0.49"
+aws-sdk-sns = "0.19"
 hmac = "0.12"
 sha2 = "0.10"
 urlencoding = "2.1"
 geohash = "0.12"
-gcp_auth = "0.7.2"
-lapin = "2.1.1"
-tokio-executor-trait = "2.1.0"
-tokio-reactor-trait = "1.1.0"
-rdkafka = { version = "0.28.0", features = ["cmake-build"]}
+gcp_auth = "0.7"
+lapin = "2.1"
+tokio-executor-trait = "2.1"
+tokio-reactor-trait = "1.1"
+rdkafka = { version = "0.28", features = ["cmake-build"]}
 
 # gRPC and Protobuf
-tonic = "0.7"
-tonic-web = "0.3"
-tonic-reflection = "0.4"
-tokio = { version = "1.17", features = ["macros", "rt-multi-thread"] }
-tokio-stream = "0.1.8"
-prost-types = "0.10"
-prost = "0.10"
-pbjson-types = "0.3"
+tonic = "0.8"
+tonic-web = "0.4"
+tonic-reflection = "0.5"
+tokio = { version = "1.21", features = ["macros", "rt-multi-thread"] }
+tokio-stream = "0.1"
+prost-types = "0.11"
+prost = "0.11"
+pbjson-types = "0.5"
 
 # gRPC and HTTP multiplexing
 warp = { version = "0.3", features = ["tls"] }
@@ -75,37 +75,36 @@ http = "0.2"
 http-body = "0.4"
 rust-embed = "6.4"
 mime_guess = "2.0"
-tower-http = { version = "0.1", features = ["trace", "auth"] }
+tower-http = { version = "0.3", features = ["trace", "auth"] }
 
 # Error handling
 thiserror = "1.0"
 anyhow = "1.0"
 
 # Authentication
-pbkdf2 = "0.8"
+pbkdf2 = "0.11"
 rand_core = { version = "0.6", features = ["std"] }
-# jsonwebtoken = "8.0"
-jsonwebtoken = "8.1.0"
+jsonwebtoken = "8.1"
 openssl = { version = "0.10", features = ["vendored"] }
-openidconnect = { version = "2.3.1", features = ["accept-rfc3339-timestamps"] }
+openidconnect = { version = "2.3", features = ["accept-rfc3339-timestamps"] }
 
 # MQTT
 paho-mqtt = { version = "0.11", features = ["vendored-ssl"] }
 hex = "0.4"
 
 # Codecs
-rquickjs = { version = "0.1.6", features = ["bindgen", "loader", "array-buffer", "chrono"] }
+rquickjs = { version = "0.1", features = ["bindgen", "loader", "array-buffer", "chrono"] }
 
 # Misc
 lazy_static = "1.4"
 uuid = { version = "1.1", features = [ "v4", "serde" ] }
 chrono = "0.4"
 async-trait = "0.1"
-aes = "0.7"
+aes = "0.8"
 rand = "0.8"
 base64 = "0.13"
 async-recursion = "1.0"
-regex = "1"
+regex = "1.6"
 petgraph = "0.6"
 prometheus-client = "0.18"
 pin-project = "1.0"
@@ -113,7 +112,7 @@ pin-project = "1.0"
 # Development and testing
 [dev-dependencies]
 httpmock = "0.6"
-bytes = "1.1"
+bytes = "1.2"
 
 # Debian packaging.
 [package.metadata.deb]
@@ -110,7 +110,7 @@ pub struct Request {
     pub uplink_history: Vec<internal::UplinkAdrHistory>,
 }
 
-#[derive(Debug, PartialEq)]
+#[derive(Debug, PartialEq, Eq)]
 pub struct Response {
     pub dr: u8,
     pub tx_power_index: u8,
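
Note: the many `Eq` additions in this commit address clippy's `derive_partial_eq_without_eq` lint, which fires when a type derives `PartialEq` but could also derive `Eq`. A minimal sketch of the before/after, reusing the `Response` struct from the hunk above (the field values in `main` are illustrative only):

    // Before (flagged by clippy): #[derive(Debug, PartialEq)]
    // After: also deriving Eq documents that the equality is total, and it
    // satisfies APIs that bound on Eq.
    #[derive(Debug, PartialEq, Eq)]
    pub struct Response {
        pub dr: u8,
        pub tx_power_index: u8,
    }

    fn main() {
        let a = Response { dr: 3, tx_power_index: 1 };
        let b = Response { dr: 3, tx_power_index: 1 };
        assert_eq!(a, b);
    }
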
@@ -7,7 +7,7 @@ use jsonwebtoken::{decode, encode, Algorithm, DecodingKey, EncodingKey, Header,
 use serde::{Deserialize, Serialize};
 use uuid::Uuid;
 
-#[derive(Serialize, Deserialize, PartialEq, Debug)]
+#[derive(Serialize, Deserialize, PartialEq, Eq, Debug)]
 pub struct AuthClaim {
     pub aud: String,
     #[serde(default, skip_serializing_if = "is_default")]
@@ -6,7 +6,7 @@ pub mod claims;
 pub mod error;
 pub mod validator;
 
-#[derive(PartialEq, Debug)]
+#[derive(PartialEq, Eq, Debug)]
 pub enum AuthID {
     None,
     User(Uuid),
@@ -85,7 +85,7 @@ impl ToStatus for tokio::task::JoinError {
     }
 }
 
-impl ToStatus for prost_types::TimestampOutOfSystemRangeError {
+impl ToStatus for prost_types::TimestampError {
     fn status(&self) -> Status {
         Status::new(Code::Internal, format!("{}", self))
     }
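
Note: prost-types now exposes the timestamp conversion failure as `prost_types::TimestampError`, which is why the impl above is renamed. A minimal sketch of the same `ToStatus` pattern applied to another error type; the trait signature is inferred from the hunk, and the `ParseIntError` impl is hypothetical, not part of the commit:

    use tonic::{Code, Status};

    // Trait shape inferred from the hunk above.
    trait ToStatus {
        fn status(&self) -> Status;
    }

    // Hypothetical extra impl following the same pattern: wrap the error's
    // Display output in an internal gRPC status.
    impl ToStatus for std::num::ParseIntError {
        fn status(&self) -> Status {
            Status::new(Code::Internal, format!("{}", self))
        }
    }
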
@@ -1,5 +1,6 @@
 use aes::cipher::generic_array::GenericArray;
-use aes::{Aes128, Block, BlockEncrypt, NewBlockCipher};
+use aes::cipher::{BlockEncrypt, KeyInit};
+use aes::{Aes128, Block};
 use anyhow::Result;
 use chrono::{DateTime, Duration, Utc};
 use tracing::info;
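
Note: aes 0.8 (cipher 0.4) removed the `NewBlockCipher` trait; ciphers are now constructed through `KeyInit`, which is what the import change above reflects. A minimal sketch of the new call pattern, using an all-zero key and block purely as placeholder values:

    use aes::cipher::{generic_array::GenericArray, BlockEncrypt, KeyInit};
    use aes::{Aes128, Block};

    fn encrypt_one_block() -> Block {
        // Placeholder key and plaintext for illustration only.
        let key = GenericArray::from([0u8; 16]);
        let mut block = Block::from([0u8; 16]);

        // KeyInit::new replaces NewBlockCipher::new from aes 0.7.
        let cipher = Aes128::new(&key);
        cipher.encrypt_block(&mut block);
        block
    }
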
@@ -304,7 +304,7 @@ impl Data {
             device_name: self.device.name.clone(),
             dev_eui: self.device.dev_eui.to_string(),
             tags: {
-                let mut tags = (&*self.device_profile.tags).clone();
+                let mut tags = (*self.device_profile.tags).clone();
                 tags.extend((*self.device.tags).clone());
                 tags
             },
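
Note: the repeated `(&*x.tags).clone()` to `(*x.tags).clone()` rewrites in this commit drop a borrow that clippy flags as redundant; both forms clone the underlying map. A minimal sketch of the pattern, where `Arc` is used only as a stand-in for a wrapper (such as the `fields::KeyValue` newtype later in this diff) that derefs to the tag map:

    use std::collections::HashMap;
    use std::sync::Arc;

    fn merge_tags() -> HashMap<String, String> {
        // Stand-ins for device-profile and device tags.
        let dp_tags: Arc<HashMap<String, String>> = Arc::new(HashMap::new());
        let dev_tags: Arc<HashMap<String, String>> = Arc::new(HashMap::new());

        // Old form (flagged): borrow the deref target, then clone through the
        // reference.
        // let mut tags = (&*dp_tags).clone();

        // New form: deref directly; the resulting HashMap clone is identical,
        // the extra `&` simply adds nothing.
        let mut tags = (*dp_tags).clone();
        tags.extend((*dev_tags).clone());
        tags
    }
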
@@ -294,7 +294,7 @@ impl TxAck {
         let dp = self.device_profile.as_ref().unwrap();
         let dev = self.device.as_ref().unwrap();
 
-        let mut tags = (&*dp.tags).clone();
+        let mut tags = (*dp.tags).clone();
         tags.extend((*dev.tags).clone());
 
         let pl = integration_pb::LogEvent {
@@ -330,7 +330,7 @@ impl TxAck {
         let dev = self.device.as_ref().unwrap();
         let qi = self.device_queue_item.as_ref().unwrap();
 
-        let mut tags = (&*dp.tags).clone();
+        let mut tags = (*dp.tags).clone();
         tags.extend((*dev.tags).clone());
 
         let downlink_id = self.downlink_frame.as_ref().unwrap().downlink_id;
@@ -104,7 +104,7 @@ impl ToDateTime for Duration {
         let mut t = *GPS_EPOCH_TIME + *self;
         for ls in LEAP_SECONDS_TABLE.iter() {
             if ls.0 < t {
-                t = t - ls.1;
+                t -= ls.1;
             }
         }
         t
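
Note: rewrites such as `t = t - ls.1;` to `t -= ls.1;` here (and `ts = ts + ...` to `ts += ...` further down) silence clippy's `assign_op_pattern` lint, which prefers the compound operator for `a = a op b`. A minimal sketch with a placeholder offset:

    use chrono::{DateTime, Duration, Utc};

    fn subtract_leap_second(mut t: DateTime<Utc>) -> DateTime<Utc> {
        let leap = Duration::seconds(1); // placeholder leap-second offset

        // t = t - leap;   // flagged: `a = a op b` should use the compound form
        t -= leap;         // preferred form
        t
    }
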
@@ -23,7 +23,7 @@ pub async fn handle(
     dev: &device::Device,
     block: &lrwn::MACCommandSet,
 ) -> Result<Option<lrwn::MACCommandSet>> {
-    let mac = (&**block)
+    let mac = (**block)
         .first()
         .ok_or_else(|| anyhow!("Expected DevStatusAns"))?;
     if let lrwn::MACCommand::DevStatusAns(pl) = mac {
@@ -42,7 +42,7 @@ pub async fn handle(
     )
     .await?;
 
-    let mut tags = (&*dp.tags).clone();
+    let mut tags = (*dp.tags).clone();
     tags.clone_from(&*dev.tags);
 
     let rx_time: DateTime<Utc> =
@@ -6,7 +6,7 @@ pub async fn handle(
     dev: &device::Device,
     block: &lrwn::MACCommandSet,
 ) -> Result<Option<lrwn::MACCommandSet>> {
-    let mac = (&**block)
+    let mac = (**block)
         .first()
         .ok_or_else(|| anyhow!("Expected DeviceModeInd"))?;
     if let lrwn::MACCommand::DeviceModeInd(pl) = mac {
@@ -13,7 +13,7 @@ pub fn handle(
     dev: &device::Device,
     block: &lrwn::MACCommandSet,
 ) -> Result<Option<lrwn::MACCommandSet>> {
-    let _ = (&**block)
+    let _ = (**block)
         .first()
         .ok_or_else(|| anyhow!("Expected DeviceTimeReq"))?;
 
@@ -11,7 +11,7 @@ pub fn handle(
     dev: &device::Device,
     block: &lrwn::MACCommandSet,
 ) -> Result<Option<lrwn::MACCommandSet>> {
-    let _ = (&**block)
+    let _ = (**block)
         .first()
         .ok_or_else(|| anyhow!("Expected LinkCheckReq"));
 
@@ -9,7 +9,7 @@ pub fn handle(
     ds: &mut internal::DeviceSession,
     block: &lrwn::MACCommandSet,
 ) -> Result<Option<lrwn::MACCommandSet>> {
-    let mac = (&**block)
+    let mac = (**block)
         .first()
         .ok_or_else(|| anyhow!("MACCommandSet is empty"))?;
 
@@ -23,10 +23,10 @@ pub fn handle(
         return Err(anyhow!("Pending RejoinParamSetupReq expected"));
     }
 
-    let ans_mac = (&**block)
+    let ans_mac = (**block)
         .first()
         .ok_or_else(|| anyhow!("MACCommandSet is empty"))?;
-    let req_mac = (&**pending.unwrap())
+    let req_mac = (**pending.unwrap())
         .first()
         .ok_or_else(|| anyhow!("MACCommandSet is empty"))?;
 
@@ -9,7 +9,7 @@ pub fn handle(
     dev: &device::Device,
     block: &lrwn::MACCommandSet,
 ) -> Result<Option<lrwn::MACCommandSet>> {
-    let block_mac = (&**block)
+    let block_mac = (**block)
         .first()
         .ok_or_else(|| anyhow!("MACCommandSet is empty"))?;
 
@@ -12,7 +12,7 @@ pub fn handle(
     ds: &mut internal::DeviceSession,
     block: &lrwn::MACCommandSet,
 ) -> Result<Option<lrwn::MACCommandSet>> {
-    let block_mac = (&**block)
+    let block_mac = (**block)
         .first()
         .ok_or_else(|| anyhow!("MACCommandSet is empty"))?;
     let block_pl = if let lrwn::MACCommand::ResetInd(pl) = block_mac {
@@ -27,10 +27,10 @@ pub fn handle(
         return Err(anyhow!("Expected pending RxParamSetupReq"));
     }
 
-    let req_mac = (&**pending.unwrap())
+    let req_mac = (**pending.unwrap())
         .first()
         .ok_or_else(|| anyhow!("MACCommandSet is empty"))?;
-    let ans_mac = (&**block)
+    let ans_mac = (**block)
         .first()
         .ok_or_else(|| anyhow!("MACCommandSet is empty"))?;
 
@@ -20,7 +20,7 @@ pub fn handle(
         return Err(anyhow!("Pending RxTimingSetupReq expected"));
     }
 
-    let req_mac = (&**pending.unwrap())
+    let req_mac = (**pending.unwrap())
         .first()
         .ok_or_else(|| anyhow!("MACCommandSet is empty"))?;
 
@@ -38,7 +38,7 @@ pub fn handle(
         return Err(anyhow!("Expected pending TxParamSetupReq"));
     }
 
-    let req_mac = (&**pending.unwrap())
+    let req_mac = (**pending.unwrap())
         .first()
         .ok_or_else(|| anyhow!("MACCommandSet is empty"))?;
 
@@ -51,7 +51,7 @@ async fn main() -> Result<()> {
         .arg(
             Arg::with_name("config-dir")
                 .required(true)
-                .short("c")
+                .short('c')
                 .long("config-dir")
                 .value_name("DIR")
                 .multiple(false)
@@ -79,7 +79,7 @@ async fn main() -> Result<()> {
         .arg(
             Arg::with_name("dir")
                 .required(true)
-                .short("d")
+                .short('d')
                 .long("dir")
                 .value_name("DIR")
                 .multiple(false)
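
Note: in clap 3, `Arg::short` takes a `char` instead of a `&str`, which is the only change these two hunks need; `Arg::with_name` still compiles as a deprecated alias for `Arg::new`. A minimal sketch of the clap 3.2 builder for the same `config-dir` flag (the binary name and path are placeholders):

    use clap::{Arg, Command};

    fn main() {
        let matches = Command::new("chirpstack")
            .arg(
                Arg::new("config-dir")
                    .required(true)
                    .short('c')
                    .long("config-dir")
                    .value_name("DIR")
                    .takes_value(true),
            )
            .get_matches_from(["chirpstack", "-c", "/etc/chirpstack"]);

        assert_eq!(matches.value_of("config-dir"), Some("/etc/chirpstack"));
    }
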
@@ -10,7 +10,7 @@ use super::error::Error;
 use super::schema::api_key;
 use super::{error, get_db_conn};
 
-#[derive(Queryable, Insertable, PartialEq, Debug)]
+#[derive(Queryable, Insertable, PartialEq, Eq, Debug)]
 #[diesel(table_name = api_key)]
 pub struct ApiKey {
     pub id: Uuid,
@@ -19,7 +19,7 @@ use super::error::Error;
 use super::get_db_conn;
 use super::schema::{application, application_integration};
 
-#[derive(Clone, Queryable, Insertable, PartialEq, Debug)]
+#[derive(Clone, Queryable, Insertable, PartialEq, Eq, Debug)]
 #[diesel(table_name = application)]
 pub struct Application {
     pub id: Uuid,
@@ -62,7 +62,7 @@ pub struct Filters {
     pub search: Option<String>,
 }
 
-#[derive(Queryable, PartialEq, Debug)]
+#[derive(Queryable, PartialEq, Eq, Debug)]
 pub struct ApplicationListItem {
     pub id: Uuid,
     pub created_at: DateTime<Utc>,
@@ -134,7 +134,7 @@ where
     }
 }
 
-#[derive(Debug, Clone, PartialEq, AsExpression, FromSqlRow, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, AsExpression, FromSqlRow, Serialize, Deserialize)]
 #[diesel(sql_type = Jsonb)]
 pub enum IntegrationConfiguration {
     None,
@@ -164,14 +164,14 @@ impl serialize::ToSql<Jsonb, Pg> for IntegrationConfiguration {
     }
 }
 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct HttpConfiguration {
     pub headers: HashMap<String, String>,
     pub json: bool,
     pub event_endpoint_url: String,
 }
 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct InfluxDbConfiguration {
     pub endpoint: String,
     pub db: String,
@@ -185,22 +185,22 @@ pub struct InfluxDbConfiguration {
     pub bucket: String,
 }
 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct ThingsBoardConfiguration {
     pub server: String,
 }
 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct MyDevicesConfiguration {
     pub endpoint: String,
 }
 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct LoraCloudConfiguration {
     pub modem_geolocation_services: LoraCloudModemGeolocationServices,
 }
 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct LoraCloudModemGeolocationServices {
     pub token: String,
     pub modem_enabled: bool,
@@ -219,7 +219,7 @@ pub struct LoraCloudModemGeolocationServices {
     pub geolocation_wifi_payload_field: String,
 }
 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct GcpPubSubConfiguration {
     pub encoding: i32,
     pub credentials_file: String,
@@ -227,7 +227,7 @@ pub struct GcpPubSubConfiguration {
     pub topic_name: String,
 }
 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct AwsSnsConfiguration {
     pub encoding: i32,
     pub region: String,
@@ -236,26 +236,26 @@ pub struct AwsSnsConfiguration {
     pub topic_arn: String,
 }
 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct AzureServiceBusConfiguration {
     pub encoding: i32,
     pub connection_string: String,
     pub publish_name: String,
 }
 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct PilotThingsConfiguration {
     pub server: String,
     pub token: String,
 }
 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct IftttConfiguration {
     pub key: String,
     pub uplink_values: [String; 2], // The first value is reserved for the DevEUI
 }
 
-#[derive(Clone, Queryable, Insertable, PartialEq, Debug)]
+#[derive(Clone, Queryable, Insertable, PartialEq, Eq, Debug)]
 #[diesel(table_name = application_integration)]
 pub struct Integration {
     pub application_id: Uuid,
@@ -82,7 +82,7 @@ impl Default for Device {
     }
 }
 
-#[derive(Queryable, PartialEq, Debug)]
+#[derive(Queryable, PartialEq, Eq, Debug)]
 pub struct DeviceListItem {
     pub dev_eui: EUI64,
     pub name: String,
@@ -104,7 +104,7 @@ pub struct Filters {
     pub search: Option<String>,
 }
 
-#[derive(QueryableByName, PartialEq, Debug)]
+#[derive(QueryableByName, PartialEq, Eq, Debug)]
 pub struct DevicesActiveInactive {
     #[diesel(sql_type = diesel::sql_types::BigInt)]
     pub never_seen_count: i64,
@@ -114,7 +114,7 @@ pub struct DevicesActiveInactive {
     pub inactive_count: i64,
 }
 
-#[derive(Queryable, PartialEq, Debug)]
+#[derive(Queryable, PartialEq, Eq, Debug)]
 pub struct DevicesDataRate {
     pub dr: Option<i16>, // as the column is nullable
     pub count: i64,
@@ -16,7 +16,7 @@ use super::{error, fields, get_db_conn};
 use crate::codec::Codec;
 use chirpstack_api::internal;
 
-#[derive(Clone, Queryable, Insertable, Debug, PartialEq)]
+#[derive(Clone, Queryable, Insertable, Debug, PartialEq, Eq)]
 #[diesel(table_name = device_profile)]
 pub struct DeviceProfile {
     pub id: Uuid,
@@ -122,7 +122,7 @@ impl DeviceProfile {
     }
 }
 
-#[derive(Queryable, PartialEq, Debug)]
+#[derive(Queryable, PartialEq, Eq, Debug)]
 pub struct DeviceProfileListItem {
     pub id: Uuid,
     pub created_at: DateTime<Utc>,
@@ -15,7 +15,7 @@ use super::schema::device_profile_template;
 use super::{error, fields, get_db_conn};
 use crate::codec::Codec;
 
-#[derive(Clone, Queryable, Insertable, Debug, PartialEq)]
+#[derive(Clone, Queryable, Insertable, Debug, PartialEq, Eq)]
 #[diesel(table_name = device_profile_template)]
 pub struct DeviceProfileTemplate {
     pub id: String,
@@ -114,7 +114,7 @@ impl Default for DeviceProfileTemplate {
     }
 }
 
-#[derive(Queryable, PartialEq, Debug)]
+#[derive(Queryable, PartialEq, Eq, Debug)]
 pub struct DeviceProfileTemplateListItem {
     pub id: String,
     pub created_at: DateTime<Utc>,
@@ -10,7 +10,7 @@ use super::get_db_conn;
 use super::schema::device_queue_item;
 use lrwn::EUI64;
 
-#[derive(Queryable, Insertable, PartialEq, Debug, Clone)]
+#[derive(Queryable, Insertable, PartialEq, Eq, Debug, Clone)]
 #[diesel(table_name = device_queue_item)]
 pub struct DeviceQueueItem {
     pub id: Uuid,
@@ -7,7 +7,7 @@ use diesel::sql_types::Jsonb;
 use diesel::{deserialize, serialize};
 use serde::{Deserialize, Serialize};
 
-#[derive(Debug, Clone, PartialEq, AsExpression, FromSqlRow)]
+#[derive(Debug, Clone, PartialEq, Eq, AsExpression, FromSqlRow)]
 #[diesel(sql_type = Jsonb)]
 pub struct KeyValue(HashMap<String, String>);
 
@@ -51,7 +51,7 @@ impl serialize::ToSql<Jsonb, Pg> for KeyValue {
     }
 }
 
-#[derive(Debug, Clone, AsExpression, FromSqlRow, PartialEq)]
+#[derive(Debug, Clone, AsExpression, FromSqlRow, PartialEq, Eq)]
 #[diesel(sql_type = Jsonb)]
 pub struct Measurements(HashMap<String, Measurement>);
 
@@ -95,7 +95,7 @@ impl serialize::ToSql<Jsonb, Pg> for Measurements {
     }
 }
 
-#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
 pub struct Measurement {
     pub name: String,
     pub kind: MeasurementKind,
@@ -103,7 +103,7 @@ pub struct Measurement {
 
 #[allow(clippy::upper_case_acronyms)]
 #[allow(non_camel_case_types)]
-#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq)]
+#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq)]
 pub enum MeasurementKind {
     // Unknown.
     UNKNOWN,
@@ -72,7 +72,7 @@ pub struct Filters {
     pub search: Option<String>,
 }
 
-#[derive(QueryableByName, PartialEq, Debug)]
+#[derive(QueryableByName, PartialEq, Eq, Debug)]
 pub struct GatewaysActiveInactive {
     #[diesel(sql_type = diesel::sql_types::BigInt)]
     pub never_seen_count: i64,
@@ -200,7 +200,7 @@ pub async fn get(
             while ts.le(&end) {
                 timestamps.push(ts);
                 keys.push(get_key(name, a, ts));
-                ts = ts + ChronoDuration::hours(1);
+                ts += ChronoDuration::hours(1);
             }
         }
         Aggregation::DAY => {
@@ -66,7 +66,7 @@ impl Default for MulticastGroup {
     }
 }
 
-#[derive(Queryable, PartialEq, Debug)]
+#[derive(Queryable, PartialEq, Eq, Debug)]
 pub struct MulticastGroupListItem {
     pub id: Uuid,
     pub created_at: DateTime<Utc>,
@@ -82,7 +82,7 @@ pub struct Filters {
     pub search: Option<String>,
 }
 
-#[derive(Clone, Queryable, QueryableByName, Insertable, AsChangeset, Debug, PartialEq)]
+#[derive(Clone, Queryable, QueryableByName, Insertable, AsChangeset, Debug, PartialEq, Eq)]
 #[diesel(table_name = multicast_group_queue_item)]
 pub struct MulticastGroupQueueItem {
     pub id: Uuid,
@@ -448,11 +448,10 @@ pub async fn enqueue(
             true => {
                 // Increment with margin as requesting the gateway to send the
                 // downlink 'now' will result in a too late error from the gateway.
-                scheduler_run_after_ts = scheduler_run_after_ts
-                    + Duration::from_std(
-                        conf.network.scheduler.multicast_class_c_margin,
-                    )
-                    .unwrap();
+                scheduler_run_after_ts += Duration::from_std(
+                    conf.network.scheduler.multicast_class_c_margin,
+                )
+                .unwrap();
                 Some(scheduler_run_after_ts.to_gps_time().num_milliseconds())
             }
         };
@@ -478,11 +477,10 @@ pub async fn enqueue(
 
             if !conf.network.scheduler.multicast_class_c_use_gps_time {
                 // Increment timing for each gateway to avoid colissions.
-                scheduler_run_after_ts = scheduler_run_after_ts
-                    + Duration::from_std(
-                        conf.network.scheduler.multicast_class_c_margin,
-                    )
-                    .unwrap();
+                scheduler_run_after_ts += Duration::from_std(
+                    conf.network.scheduler.multicast_class_c_margin,
+                )
+                .unwrap();
             }
         }
     }
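
Note: the two enqueue hunks above only reshape the `scheduler_run_after_ts = scheduler_run_after_ts + ...` statement into the compound `+=` form; the conversion itself is unchanged. `chrono::Duration::from_std` converts a `std::time::Duration` and returns a `Result` (it fails on overflow), which is why the `.unwrap()` stays. A minimal sketch with a placeholder margin value:

    use chrono::{DateTime, Duration, Utc};
    use std::time::Duration as StdDuration;

    fn push_back(scheduler_run_after_ts: DateTime<Utc>) -> DateTime<Utc> {
        // Placeholder for conf.network.scheduler.multicast_class_c_margin.
        let margin: StdDuration = StdDuration::from_secs(5);

        let mut ts = scheduler_run_after_ts;
        // from_std returns Err only if the value overflows chrono's range.
        ts += Duration::from_std(margin).unwrap();
        ts
    }
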
@@ -10,7 +10,7 @@ use super::error::Error;
 use super::get_db_conn;
 use super::schema::{tenant, tenant_user, user};
 
-#[derive(Queryable, Insertable, PartialEq, Debug, Clone)]
+#[derive(Queryable, Insertable, PartialEq, Eq, Debug, Clone)]
 #[diesel(table_name = tenant)]
 pub struct Tenant {
     pub id: Uuid,
@@ -51,7 +51,7 @@ impl Default for Tenant {
     }
 }
 
-#[derive(Queryable, Insertable, AsChangeset, PartialEq, Debug)]
+#[derive(Queryable, Insertable, AsChangeset, PartialEq, Eq, Debug)]
 #[diesel(table_name = tenant_user)]
 pub struct TenantUser {
     pub tenant_id: Uuid,
@@ -79,7 +79,7 @@ impl Default for TenantUser {
     }
 }
 
-#[derive(Queryable, PartialEq, Debug)]
+#[derive(Queryable, PartialEq, Eq, Debug)]
 pub struct TenantUserListItem {
     pub tenant_id: Uuid,
     pub user_id: Uuid,
@@ -16,7 +16,7 @@ use super::error::Error;
 use super::get_db_conn;
 use super::schema::user;
 
-#[derive(Queryable, Insertable, PartialEq, Debug, Clone)]
+#[derive(Queryable, Insertable, PartialEq, Eq, Debug, Clone)]
 #[diesel(table_name = user)]
 pub struct User {
     pub id: Uuid,
@@ -248,9 +248,10 @@ pub async fn list(limit: i64, offset: i64) -> Result<Vec<User>, Error> {
 // https://github.com/P-H-C/phc-string-format/blob/master/phc-sf-spec.md#specification
 fn hash_password(pw: &str, rounds: u32) -> Result<String, Error> {
     let salt = SaltString::generate(&mut OsRng);
-    let hash_resp = Pbkdf2.hash_password(
+    let hash_resp = Pbkdf2.hash_password_customized(
         pw.as_bytes(),
         Some(Algorithm::Pbkdf2Sha512.ident()),
+        None,
         pbkdf2::Params {
             rounds,
             ..Default::default()
@@ -218,7 +218,7 @@ impl Data {
         let dp = self.device_profile.as_ref().unwrap();
         let dev = self.device.as_ref().unwrap();
 
-        let mut tags = (&*dp.tags).clone();
+        let mut tags = (*dp.tags).clone();
         tags.extend((*dev.tags).clone());
 
         self.device_info = Some(integration_pb::DeviceInfo {
@@ -807,7 +807,7 @@ impl Data {
 
         device_queue::delete_item(&qi.id).await?;
 
-        let mut tags = (&*dp.tags).clone();
+        let mut tags = (*dp.tags).clone();
         tags.extend((*dev.tags).clone());
 
         integration::ack_event(
@@ -193,7 +193,7 @@ impl JoinRequest {
         let dp = self.device_profile.as_ref().unwrap();
         let dev = self.device.as_ref().unwrap();
 
-        let mut tags = (&*dp.tags).clone();
+        let mut tags = (*dp.tags).clone();
         tags.extend((*dev.tags).clone());
 
         self.device_info = Some(integration_pb::DeviceInfo {
@@ -171,7 +171,7 @@ impl JoinRequest {
         let dp = self.device_profile.as_ref().unwrap();
         let dev = self.device.as_ref().unwrap();
 
-        let mut tags = (&*dp.tags).clone();
+        let mut tags = (*dp.tags).clone();
         tags.extend((*dev.tags).clone());
 
         self.device_info = Some(integration_pb::DeviceInfo {