Refactor models names
This commit is contained in:
parent
76b14ce75b
commit
be7f31906c
24 changed files with 461 additions and 340 deletions
@@ -13,3 +13,48 @@ file = "stdout"
 driver = "gpio"
 pin = 5
 inverted = 1
+
+[[relays]]
+driver = "gpio"
+pin = 4
+inverted = 1
+
+[[relays]]
+driver = "gpio"
+pin = 3
+inverted = 1
+
+[[relays]]
+driver = "gpio"
+pin = 2
+inverted = 1
+
+[[relays]]
+driver = "gpio"
+pin = 1
+inverted = 1
+
+[[relays]]
+driver = "gpio"
+pin = 0
+inverted = 1
+
+[[relays]]
+driver = "gpio"
+pin = 16
+inverted = 1
+
+[[relays]]
+driver = "gpio"
+pin = 15
+inverted = 1
+
+[[relays]]
+driver = "piface"
+pin = 1
+inverted = 0
+
+[[relays]]
+driver = "piface"
+pin = 0
+inverted = 0

@@ -2,17 +2,19 @@ use serde::{Deserialize, Deserializer};
 
 #[derive(Debug, Clone, Copy)]
 pub enum Driver {
     Gpio,
     Piface,
 }
 
 impl<'de> Deserialize<'de> for Driver {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de> {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
         match String::deserialize(deserializer)?.as_str() {
             "gpio" => Ok(Driver::Gpio),
             "piface" => Ok(Driver::Piface),
             _ => Err(serde::de::Error::custom("invalid driver")),
         }
     }
 }

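Note: the Deserialize impl above is what maps the driver = "gpio" / driver = "piface" strings from the [[relays]] entries in the config onto the Driver enum. A minimal stand-alone sketch of that mapping, assuming serde with the derive feature and the toml crate (the RelayEntry struct is hypothetical and used only for this illustration; the project itself loads its settings through the config crate in utils::load_settings):

use serde::{Deserialize, Deserializer};

#[derive(Debug, Clone, Copy)]
enum Driver {
    Gpio,
    Piface,
}

impl<'de> Deserialize<'de> for Driver {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Same matching as in the diff above: accept only the two known strings.
        match String::deserialize(deserializer)?.as_str() {
            "gpio" => Ok(Driver::Gpio),
            "piface" => Ok(Driver::Piface),
            _ => Err(serde::de::Error::custom("invalid driver")),
        }
    }
}

// Hypothetical helper struct, only for this illustration.
#[derive(Debug, Deserialize)]
struct RelayEntry {
    driver: Driver,
    pin: u8,
    inverted: u8,
}

fn main() {
    let entry: RelayEntry =
        toml::from_str("driver = \"piface\"\npin = 1\ninverted = 0\n").unwrap();
    println!("{:?}", entry); // RelayEntry { driver: Piface, pin: 1, inverted: 0 }
}
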
@@ -1,32 +1,51 @@
 use std::str;
 
-use futures::{future, pin_mut, SinkExt, StreamExt};
+use crate::relay_loop::run_relay_loop;
+use crate::settings::Settings;
+use emgauwa_lib::db::errors::DatabaseError;
+use emgauwa_lib::db::{DbController, DbRelay};
+use emgauwa_lib::types::ControllerUid;
+use emgauwa_lib::{db, models};
 use futures::channel::mpsc;
+use futures::{future, pin_mut, SinkExt, StreamExt};
 use sqlx::pool::PoolConnection;
 use sqlx::Sqlite;
 use tokio::io::AsyncReadExt;
-use tokio_tungstenite::{connect_async, tungstenite::protocol::Message};
 use tokio_tungstenite::tungstenite::Error;
-use emgauwa_lib::db;
-use emgauwa_lib::db::Controller;
-use emgauwa_lib::db::types::ControllerUid;
-use crate::relay_loop::run_relay_loop;
-use crate::settings::Settings;
+use tokio_tungstenite::{connect_async, tungstenite::protocol::Message};
 
-mod settings;
 mod driver;
 mod relay_loop;
+mod settings;
 
-fn create_this_controller(conn: &mut PoolConnection<Sqlite>, settings: &Settings) -> Controller {
-    futures::executor::block_on(async {
-        Controller::create(
-            conn,
-            &ControllerUid::new(),
-            &settings.name,
-            i64::try_from(settings.relays.len()).expect("Too many relays"),
-            true
-        ).await.expect("Failed to create controller")
-    })
+async fn create_this_controller(
+    conn: &mut PoolConnection<Sqlite>,
+    settings: &Settings,
+) -> DbController {
+    DbController::create(
+        conn,
+        &ControllerUid::default(),
+        &settings.name,
+        i64::try_from(settings.relays.len()).expect("Too many relays"),
+        true,
+    )
+    .await
+    .expect("Failed to create controller")
+}
+
+async fn create_this_relay(
+    conn: &mut PoolConnection<Sqlite>,
+    this_controller: &DbController,
+    settings_relay: &settings::Relay,
+) -> DbRelay {
+    DbRelay::create(
+        conn,
+        &settings_relay.name,
+        settings_relay.number.unwrap(),
+        this_controller,
+    )
+    .await
+    .expect("Failed to create relay")
 }
 
 #[tokio::main]

@@ -37,22 +56,60 @@ async fn main() {
 
     let mut conn = pool.acquire().await.unwrap();
 
-    let this = Controller::get_all(&mut conn)
+    let db_controller = DbController::get_all(&mut conn)
         .await
         .expect("Failed to get controller from database")
         .pop()
-        .unwrap_or_else(|| create_this_controller(&mut conn, &settings));
+        .unwrap_or_else(|| {
+            futures::executor::block_on(create_this_controller(&mut conn, &settings))
+        });
+
+    let db_relays: Vec<DbRelay> = settings
+        .relays
+        .iter()
+        .map(|relay| {
+            futures::executor::block_on(async {
+                match DbRelay::get_by_controller_and_num(
+                    &mut conn,
+                    &db_controller,
+                    relay.number.unwrap(),
+                )
+                .await
+                {
+                    Ok(relay) => relay,
+                    Err(err) => match err {
+                        DatabaseError::NotFound => {
+                            create_this_relay(&mut conn, &db_controller, relay).await
+                        }
+                        _ => panic!("Failed to get relay from database"),
+                    },
+                }
+            })
+        })
+        .collect();
+
+    let db_controller = db_controller
+        .update(&mut conn, &db_controller.name, db_relays.len() as i64, true)
+        .await
+        .unwrap();
+
+    let relays = db_relays
+        .into_iter()
+        .map(|relay| models::Relay::from_db_relay(relay, &mut conn))
+        .collect();
+
+    let this = models::Controller {
+        controller: db_controller,
+        relays,
+    };
 
     let this_json = serde_json::to_string(&this).unwrap();
 
-    println!("{:?}", settings.relays);
-    println!("{:?}", this);
     println!("{}", this_json);
 
     let url = format!(
         "ws://{}:{}/api/v1/ws/controllers",
-        settings.core.host,
-        settings.core.port
+        settings.core.host, settings.core.port
     );
 
     let (stdin_tx, stdin_rx) = mpsc::unbounded();

@@ -83,13 +140,14 @@ async fn read_stdin(tx: mpsc::UnboundedSender<Message>) {
             Ok(n) => n,
         };
         buf.truncate(n);
-        tx.unbounded_send(Message::text(str::from_utf8(&buf).unwrap())).unwrap();
+        tx.unbounded_send(Message::text(str::from_utf8(&buf).unwrap()))
+            .unwrap();
     }
 }
 
 pub async fn handle_message(message_result: Result<Message, Error>) {
     match message_result {
         Ok(message) => println!("{}", message.into_text().unwrap()),
-        Err(err) => println!("Error: {}", err)
+        Err(err) => println!("Error: {}", err),
     }
 }

@@ -1,9 +1,8 @@
-use chrono::Local;
 use crate::settings::Settings;
+use chrono::Local;
 
 #[allow(unused_variables)]
 pub async fn run_relay_loop(settings: Settings) {
     let next_timestamp = Local::now().naive_local();
-    loop {
-    }
+    loop {}
 }

@@ -1,5 +1,6 @@
 use emgauwa_lib::{constants, utils};
 use serde_derive::Deserialize;
+
 use crate::driver::Driver;
 
 #[derive(Clone, Debug, Deserialize)]

@@ -23,6 +24,8 @@ pub struct Logging {
 #[allow(unused)]
 pub struct Relay {
     pub driver: Driver,
+    pub name: String,
+    pub number: Option<i64>,
     pub pin: u8,
     pub inverted: bool,
 }

@@ -54,6 +57,8 @@ impl Default for Relay {
     fn default() -> Self {
         Relay {
             driver: Driver::Gpio,
+            number: None,
+            name: String::from("Relay"),
             pin: 0,
             inverted: false,
         }

@@ -79,5 +84,13 @@ impl Default for Logging {
 }
 
 pub fn init() -> Settings {
-    utils::load_settings("controller", "CONTROLLER")
+    let mut settings: Settings = utils::load_settings("controller", "CONTROLLER");
+
+    for (num, relay) in settings.relays.iter_mut().enumerate() {
+        if relay.number.is_none() {
+            relay.number = Some(num as i64);
+        }
+    }
+
+    settings
 }

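Note: the init() change above back-fills a missing relay number from the position of the [[relays]] entry in the settings file. A small self-contained illustration of that rule (the helper function below is hypothetical and exists only to show the numbering behaviour):

// Relays keep an explicit number if the config set one; otherwise they get
// their index in the list, mirroring the loop added to settings::init().
fn assign_default_numbers(numbers: &mut [Option<i64>]) {
    for (num, slot) in numbers.iter_mut().enumerate() {
        if slot.is_none() {
            *slot = Some(num as i64);
        }
    }
}

fn main() {
    let mut numbers = vec![None, Some(7), None];
    assign_default_numbers(&mut numbers);
    assert_eq!(numbers, vec![Some(0), Some(7), Some(2)]);
}
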
@@ -1,5 +1,5 @@
-use std::str::FromStr;
 use actix_cors::Cors;
+use std::str::FromStr;
 
 use actix_web::middleware::TrailingSlash;
 use actix_web::{middleware, web, App, HttpServer};

@@ -13,8 +13,8 @@ mod settings;
 async fn main() -> std::io::Result<()> {
     let settings = settings::init();
 
-    let log_level: LevelFilter = LevelFilter::from_str(&settings.logging.level)
-        .expect("Error parsing log level.");
+    let log_level: LevelFilter =
+        LevelFilter::from_str(&settings.logging.level).expect("Error parsing log level.");
     trace!("Log level set to {:?}", log_level);
 
     SimpleLogger::new()

@@ -26,7 +26,6 @@ async fn main() -> std::io::Result<()> {
 
     log::info!("Starting server on {}:{}", settings.host, settings.port);
     HttpServer::new(move || {
-
         let cors = Cors::default()
             .allow_any_method()
             .allow_any_header()

@@ -5,11 +5,11 @@ use sqlx::pool::PoolConnection;
 use sqlx::Sqlite;
 
 use crate::db::errors::DatabaseError;
-use crate::db::Tag;
-use crate::db::types::ControllerUid;
+use crate::db::DbTag;
+use crate::types::ControllerUid;
 
 #[derive(Debug, Serialize, Clone)]
-pub struct Controller {
+pub struct DbController {
     pub id: i64,
     pub uid: ControllerUid,
     pub name: String,

@@ -17,11 +17,11 @@ pub struct Controller {
     pub active: bool,
 }
 
-impl Controller {
+impl DbController {
     pub async fn get_all(
         conn: &mut PoolConnection<Sqlite>,
-    ) -> Result<Vec<Controller>, DatabaseError> {
-        Ok(sqlx::query_as!(Controller, "SELECT * FROM controllers")
+    ) -> Result<Vec<DbController>, DatabaseError> {
+        Ok(sqlx::query_as!(DbController, "SELECT * FROM controllers")
             .fetch_all(conn.deref_mut())
             .await?)
     }

@@ -29,12 +29,8 @@ impl Controller {
     pub async fn get(
         conn: &mut PoolConnection<Sqlite>,
         id: i64,
-    ) -> Result<Controller, DatabaseError> {
-        sqlx::query_as!(
-            Controller,
-            "SELECT * FROM controllers WHERE id = ?",
-            id
-        )
+    ) -> Result<DbController, DatabaseError> {
+        sqlx::query_as!(DbController, "SELECT * FROM controllers WHERE id = ?", id)
             .fetch_optional(conn.deref_mut())
             .await
             .map(|s| s.ok_or(DatabaseError::NotFound))?

@@ -43,9 +39,9 @@ impl Controller {
     pub async fn get_by_uid(
         conn: &mut PoolConnection<Sqlite>,
         filter_uid: &ControllerUid,
-    ) -> Result<Controller, DatabaseError> {
+    ) -> Result<DbController, DatabaseError> {
         sqlx::query_as!(
-            Controller,
+            DbController,
             "SELECT * FROM controllers WHERE uid = ?",
             filter_uid
         )

@@ -56,9 +52,9 @@ impl Controller {
 
     pub async fn get_by_tag(
         conn: &mut PoolConnection<Sqlite>,
-        tag: &Tag,
-    ) -> Result<Vec<Controller>, DatabaseError> {
-        Ok(sqlx::query_as!(Controller, "SELECT schedule.* FROM controllers AS schedule INNER JOIN junction_tag ON junction_tag.schedule_id = schedule.id WHERE junction_tag.tag_id = ?", tag.id)
+        tag: &DbTag,
+    ) -> Result<Vec<DbController>, DatabaseError> {
+        Ok(sqlx::query_as!(DbController, "SELECT schedule.* FROM controllers AS schedule INNER JOIN junction_tag ON junction_tag.schedule_id = schedule.id WHERE junction_tag.tag_id = ?", tag.id)
             .fetch_all(conn.deref_mut())
             .await?)
     }

@@ -81,10 +77,10 @@ impl Controller {
         new_uid: &ControllerUid,
         new_name: &str,
         new_relay_count: i64,
-        new_active: bool
-    ) -> Result<Controller, DatabaseError> {
+        new_active: bool,
+    ) -> Result<DbController, DatabaseError> {
         sqlx::query_as!(
-            Controller,
+            DbController,
             "INSERT INTO controllers (uid, name, relay_count, active) VALUES (?, ?, ?, ?) RETURNING *",
             new_uid,
             new_name,

@@ -101,8 +97,8 @@ impl Controller {
         conn: &mut PoolConnection<Sqlite>,
         new_name: &str,
         new_relay_count: i64,
-        new_active: bool
-    ) -> Result<Controller, DatabaseError> {
+        new_active: bool,
+    ) -> Result<DbController, DatabaseError> {
         sqlx::query!(
             "UPDATE controllers SET name = ?, relay_count = ?, active = ? WHERE id = ?",
             new_name,

@@ -6,20 +6,19 @@ use std::str::FromStr;
 
 use crate::db::errors::DatabaseError;
 use crate::db::model_utils::Period;
-use crate::db::types::ScheduleUid;
+use crate::types::ScheduleUid;
 
+mod controllers;
 pub mod errors;
 mod model_utils;
+mod relays;
 mod schedules;
 mod tag;
-pub mod types;
-mod controllers;
-mod relays;
 
-pub use controllers::Controller;
-pub use relays::Relay;
-pub use schedules::{Periods, Schedule};
-pub use tag::Tag;
+pub use controllers::DbController;
+pub use relays::DbRelay;
+pub use schedules::{DbPeriods, DbSchedule};
+pub use tag::DbTag;
 
 static MIGRATOR: Migrator = sqlx::migrate!("../migrations"); // defaults to "./migrations"
 

@@ -32,16 +31,16 @@ async fn init_schedule(
     pool: &Pool<Sqlite>,
     uid: &ScheduleUid,
     name: &str,
-    periods: Periods,
+    periods: DbPeriods,
 ) -> Result<(), DatabaseError> {
     trace!("Initializing schedule {:?}", name);
-    match Schedule::get_by_uid(&mut pool.acquire().await.unwrap(), uid).await {
+    match DbSchedule::get_by_uid(&mut pool.acquire().await.unwrap(), uid).await {
         Ok(_) => Ok(()),
         Err(err) => match err {
             DatabaseError::NotFound => {
                 trace!("Schedule {:?} not found, inserting", name);
                 sqlx::query_as!(
-                    Schedule,
+                    DbSchedule,
                     "INSERT INTO schedules (uid, name, periods) VALUES (?, ?, ?) RETURNING *",
                     uid,
                     name,

@@ -71,7 +70,7 @@ pub async fn init(db: &str) -> Pool<Sqlite> {
 
     run_migrations(&pool).await;
 
-    init_schedule(&pool, &ScheduleUid::Off, "Off", Periods(vec![]))
+    init_schedule(&pool, &ScheduleUid::Off, "Off", DbPeriods(vec![]))
         .await
         .expect("Error initializing schedule Off");
 

@@ -79,7 +78,7 @@ pub async fn init(db: &str) -> Pool<Sqlite> {
         &pool,
         &ScheduleUid::On,
         "On",
-        Periods(vec![Period::new_on()]),
+        DbPeriods(vec![Period::new_on()]),
     )
     .await
     .expect("Error initializing schedule On");

@@ -1,4 +1,4 @@
-use crate::db::Periods;
+use crate::db::DbPeriods;
 use chrono::{NaiveTime, Timelike};
 use serde::{Deserialize, Serialize};
 use sqlx::database::HasArguments;

@@ -51,7 +51,7 @@ impl Period {
     }
 }
 
-impl Type<Sqlite> for Periods {
+impl Type<Sqlite> for DbPeriods {
     fn type_info() -> SqliteTypeInfo {
         <&[u8] as Type<Sqlite>>::type_info()
     }

@@ -61,22 +61,22 @@ impl Type<Sqlite> for Periods {
     }
 }
 
-impl<'q> Encode<'q, Sqlite> for Periods {
+impl<'q> Encode<'q, Sqlite> for DbPeriods {
     //noinspection DuplicatedCode
     fn encode_by_ref(&self, buf: &mut <Sqlite as HasArguments<'q>>::ArgumentBuffer) -> IsNull {
         <&Vec<u8> as Encode<Sqlite>>::encode(&Vec::from(self), buf)
     }
 }
 
-impl<'r> Decode<'r, Sqlite> for Periods {
+impl<'r> Decode<'r, Sqlite> for DbPeriods {
     fn decode(value: SqliteValueRef<'r>) -> Result<Self, BoxDynError> {
         let blob = <&[u8] as Decode<Sqlite>>::decode(value)?;
-        Ok(Periods::from(Vec::from(blob)))
+        Ok(DbPeriods::from(Vec::from(blob)))
     }
 }
 
-impl From<&Periods> for Vec<u8> {
-    fn from(periods: &Periods) -> Vec<u8> {
+impl From<&DbPeriods> for Vec<u8> {
+    fn from(periods: &DbPeriods) -> Vec<u8> {
         periods
             .0
             .iter()

@@ -93,7 +93,7 @@ impl From<&Periods> for Vec<u8> {
     }
 }
 
-impl From<Vec<u8>> for Periods {
+impl From<Vec<u8>> for DbPeriods {
     fn from(value: Vec<u8>) -> Self {
         let mut vec = Vec::new();
         for i in (3..value.len()).step_by(4) {

@@ -106,6 +106,6 @@ impl From<Vec<u8>> for Periods {
                 end: NaiveTime::from_hms_opt(end_val_h, end_val_m, 0).unwrap(),
             });
         }
-        Periods(vec)
+        DbPeriods(vec)
     }
 }

@@ -1,16 +1,15 @@
 use serde_derive::Serialize;
 use std::ops::DerefMut;
 
+use crate::db::DbController;
 use sqlx::pool::PoolConnection;
 use sqlx::Sqlite;
-use crate::db::Controller;
 
 use crate::db::errors::DatabaseError;
-use crate::db::Tag;
+use crate::db::DbTag;
 
-#[derive(Debug, Serialize, Clone)]
-#[derive(sqlx::FromRow)]
-pub struct Relay {
+#[derive(Debug, Serialize, Clone, sqlx::FromRow)]
+pub struct DbRelay {
     #[serde(skip)]
     pub id: i64,
     pub name: String,

@@ -19,34 +18,41 @@ pub struct Relay {
     pub controller_id: i64,
 }
 
-impl Relay {
-    pub async fn get_all(
-        conn: &mut PoolConnection<Sqlite>,
-    ) -> Result<Vec<Relay>, DatabaseError> {
-        Ok(sqlx::query_as!(Relay, "SELECT * FROM relays")
+impl DbRelay {
+    pub async fn get_all(conn: &mut PoolConnection<Sqlite>) -> Result<Vec<DbRelay>, DatabaseError> {
+        Ok(sqlx::query_as!(DbRelay, "SELECT * FROM relays")
             .fetch_all(conn.deref_mut())
             .await?)
     }
 
-    pub async fn get(
-        conn: &mut PoolConnection<Sqlite>,
-        id: i64,
-    ) -> Result<Relay, DatabaseError> {
-        sqlx::query_as!(
-            Relay,
-            "SELECT * FROM relays WHERE id = ?",
-            id
-        )
+    pub async fn get(conn: &mut PoolConnection<Sqlite>, id: i64) -> Result<DbRelay, DatabaseError> {
+        sqlx::query_as!(DbRelay, "SELECT * FROM relays WHERE id = ?", id)
             .fetch_optional(conn.deref_mut())
             .await
             .map(|s| s.ok_or(DatabaseError::NotFound))?
     }
 
+    pub async fn get_by_controller_and_num(
+        conn: &mut PoolConnection<Sqlite>,
+        controller: &DbController,
+        number: i64,
+    ) -> Result<DbRelay, DatabaseError> {
+        sqlx::query_as!(
+            DbRelay,
+            "SELECT * FROM relays WHERE controller_id = ? AND number = ?",
+            controller.id,
+            number
+        )
+        .fetch_optional(conn.deref_mut())
+        .await
+        .map(|s| s.ok_or(DatabaseError::NotFound))?
+    }
+
     pub async fn get_by_tag(
         conn: &mut PoolConnection<Sqlite>,
-        tag: &Tag,
-    ) -> Result<Vec<Relay>, DatabaseError> {
-        Ok(sqlx::query_as!(Relay, "SELECT schedule.* FROM relays AS schedule INNER JOIN junction_tag ON junction_tag.schedule_id = schedule.id WHERE junction_tag.tag_id = ?", tag.id)
+        tag: &DbTag,
+    ) -> Result<Vec<DbRelay>, DatabaseError> {
+        Ok(sqlx::query_as!(DbRelay, "SELECT schedule.* FROM relays AS schedule INNER JOIN junction_tag ON junction_tag.schedule_id = schedule.id WHERE junction_tag.tag_id = ?", tag.id)
             .fetch_all(conn.deref_mut())
             .await?)
     }

@@ -55,10 +61,10 @@ impl Relay {
         conn: &mut PoolConnection<Sqlite>,
         new_name: &str,
         new_number: i64,
-        new_controller: &Controller,
-    ) -> Result<Relay, DatabaseError> {
+        new_controller: &DbController,
+    ) -> Result<DbRelay, DatabaseError> {
         sqlx::query_as!(
-            Relay,
+            DbRelay,
             "INSERT INTO relays (name, number, controller_id) VALUES (?, ?, ?) RETURNING *",
             new_name,
             new_number,

@@ -69,10 +75,7 @@ impl Relay {
         .ok_or(DatabaseError::InsertGetError)
     }
 
-    pub async fn delete(
-        &self,
-        conn: &mut PoolConnection<Sqlite>,
-    ) -> Result<(), DatabaseError> {
+    pub async fn delete(&self, conn: &mut PoolConnection<Sqlite>) -> Result<(), DatabaseError> {
         sqlx::query!("DELETE FROM relays WHERE id = ?", self.id)
             .execute(conn.deref_mut())
             .await

@@ -87,8 +90,8 @@ impl Relay {
         conn: &mut PoolConnection<Sqlite>,
         new_name: &str,
         new_number: i64,
-        new_controller: &Controller,
-    ) -> Result<Relay, DatabaseError> {
+        new_controller: &DbController,
+    ) -> Result<DbRelay, DatabaseError> {
         sqlx::query!(
             "UPDATE relays SET name = ?, number = ?, controller_id = ? WHERE id = ?",
             new_name,

@@ -99,14 +102,20 @@ impl Relay {
         .execute(conn.deref_mut())
         .await?;
 
-        Relay::get(conn, self.id).await
+        DbRelay::get(conn, self.id).await
     }
 
-    pub async fn get_controller(&self, conn: &mut PoolConnection<Sqlite>) -> Result<Controller, DatabaseError> {
-        Controller::get(conn, self.controller_id).await
+    pub async fn get_controller(
+        &self,
+        conn: &mut PoolConnection<Sqlite>,
+    ) -> Result<DbController, DatabaseError> {
+        DbController::get(conn, self.controller_id).await
     }
 
-    pub async fn get_tags(&self, conn: &mut PoolConnection<Sqlite>) -> Result<Vec<String>, DatabaseError> {
+    pub async fn get_tags(
+        &self,
+        conn: &mut PoolConnection<Sqlite>,
+    ) -> Result<Vec<String>, DatabaseError> {
         Ok(sqlx::query_scalar!("SELECT tag FROM tags INNER JOIN junction_tag ON junction_tag.tag_id = tags.id WHERE junction_tag.relay_id = ?", self.id)
             .fetch_all(conn.deref_mut())
             .await?)

@@ -122,7 +131,7 @@ impl Relay {
         .await?;
 
         for new_tag in new_tags {
-            let tag: Tag = Tag::get_by_tag_or_create(conn, new_tag).await?;
+            let tag: DbTag = DbTag::get_by_tag_or_create(conn, new_tag).await?;
             tag.link_relay(conn, self).await?;
         }
         Ok(())

@@ -7,27 +7,27 @@ use sqlx::Sqlite;
 
 use crate::db::errors::DatabaseError;
 use crate::db::model_utils::Period;
-use crate::db::Tag;
-use crate::db::types::ScheduleUid;
+use crate::db::DbTag;
+use crate::types::ScheduleUid;
 
 #[derive(Debug, Serialize, Clone)]
-pub struct Schedule {
+pub struct DbSchedule {
     #[serde(skip)]
     pub id: i64,
     #[serde(rename(serialize = "id"))]
     pub uid: ScheduleUid,
     pub name: String,
-    pub periods: Periods,
+    pub periods: DbPeriods,
 }
 
 #[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
-pub struct Periods(pub Vec<Period>);
+pub struct DbPeriods(pub Vec<Period>);
 
-impl Schedule {
+impl DbSchedule {
     pub async fn get_all(
         conn: &mut PoolConnection<Sqlite>,
-    ) -> Result<Vec<Schedule>, DatabaseError> {
-        Ok(sqlx::query_as!(Schedule, "SELECT * FROM schedules")
+    ) -> Result<Vec<DbSchedule>, DatabaseError> {
+        Ok(sqlx::query_as!(DbSchedule, "SELECT * FROM schedules")
             .fetch_all(conn.deref_mut())
             .await?)
     }

@@ -35,12 +35,8 @@ impl Schedule {
     pub async fn get(
         conn: &mut PoolConnection<Sqlite>,
         id: i64,
-    ) -> Result<Schedule, DatabaseError> {
-        sqlx::query_as!(
-            Schedule,
-            "SELECT * FROM schedules WHERE id = ?",
-            id
-        )
+    ) -> Result<DbSchedule, DatabaseError> {
+        sqlx::query_as!(DbSchedule, "SELECT * FROM schedules WHERE id = ?", id)
            .fetch_optional(conn.deref_mut())
            .await
            .map(|s| s.ok_or(DatabaseError::NotFound))?

@@ -49,9 +45,9 @@ impl Schedule {
     pub async fn get_by_uid(
         conn: &mut PoolConnection<Sqlite>,
         filter_uid: &ScheduleUid,
-    ) -> Result<Schedule, DatabaseError> {
+    ) -> Result<DbSchedule, DatabaseError> {
         sqlx::query_as!(
-            Schedule,
+            DbSchedule,
             "SELECT * FROM schedules WHERE uid = ?",
             filter_uid
         )

@@ -62,9 +58,9 @@ impl Schedule {
 
     pub async fn get_by_tag(
         conn: &mut PoolConnection<Sqlite>,
-        tag: &Tag,
-    ) -> Result<Vec<Schedule>, DatabaseError> {
-        Ok(sqlx::query_as!(Schedule, "SELECT schedule.* FROM schedules AS schedule INNER JOIN junction_tag ON junction_tag.schedule_id = schedule.id WHERE junction_tag.tag_id = ?", tag.id)
+        tag: &DbTag,
+    ) -> Result<Vec<DbSchedule>, DatabaseError> {
+        Ok(sqlx::query_as!(DbSchedule, "SELECT schedule.* FROM schedules AS schedule INNER JOIN junction_tag ON junction_tag.schedule_id = schedule.id WHERE junction_tag.tag_id = ?", tag.id)
             .fetch_all(conn.deref_mut())
             .await?)
     }

@@ -91,11 +87,11 @@ impl Schedule {
     pub async fn create(
         conn: &mut PoolConnection<Sqlite>,
         new_name: &str,
-        new_periods: &Periods,
-    ) -> Result<Schedule, DatabaseError> {
+        new_periods: &DbPeriods,
+    ) -> Result<DbSchedule, DatabaseError> {
         let uid = ScheduleUid::default();
         sqlx::query_as!(
-            Schedule,
+            DbSchedule,
             "INSERT INTO schedules (uid, name, periods) VALUES (?, ?, ?) RETURNING *",
             uid,
             new_name,

@@ -110,8 +106,8 @@ impl Schedule {
         &self,
         conn: &mut PoolConnection<Sqlite>,
         new_name: &str,
-        new_periods: &Periods,
-    ) -> Result<Schedule, DatabaseError> {
+        new_periods: &DbPeriods,
+    ) -> Result<DbSchedule, DatabaseError> {
         // overwrite periods on protected schedules
         let new_periods = match self.uid {
             ScheduleUid::Off | ScheduleUid::On => self.periods.borrow(),

@@ -127,7 +123,7 @@ impl Schedule {
         .execute(conn.deref_mut())
         .await?;
 
-        Schedule::get(conn, self.id).await
+        DbSchedule::get(conn, self.id).await
     }
 
     pub async fn get_tags(

@@ -149,7 +145,7 @@ impl Schedule {
         .await?;
 
         for new_tag in new_tags {
-            let tag: Tag = Tag::get_by_tag_or_create(conn, new_tag).await?;
+            let tag: DbTag = DbTag::get_by_tag_or_create(conn, new_tag).await?;
             tag.link_schedule(conn, self).await?;
         }
         Ok(())

@@ -5,28 +5,28 @@ use sqlx::pool::PoolConnection;
 use sqlx::Sqlite;
 
 use crate::db::errors::DatabaseError;
-use crate::db::{Relay, Schedule};
+use crate::db::{DbRelay, DbSchedule};
 
 #[derive(Debug, Serialize, Clone)]
-pub struct Tag {
+pub struct DbTag {
     pub id: i64,
     pub tag: String,
 }
 
-pub struct JunctionTag {
+pub struct DbJunctionTag {
     pub id: i64,
     pub tag_id: i64,
     pub relay_id: Option<i64>,
     pub schedule_id: Option<i64>,
 }
 
-impl Tag {
+impl DbTag {
     pub async fn create(
         conn: &mut PoolConnection<Sqlite>,
         new_tag: &str,
-    ) -> Result<Tag, DatabaseError> {
+    ) -> Result<DbTag, DatabaseError> {
         sqlx::query_as!(
-            Tag,
+            DbTag,
             "INSERT INTO tags (tag) VALUES (?) RETURNING *",
             new_tag
         )

@@ -35,11 +35,8 @@ impl Tag {
         .ok_or(DatabaseError::InsertGetError)
     }
 
-    pub async fn get(
-        conn: &mut PoolConnection<Sqlite>,
-        id: i64,
-    ) -> Result<Tag, DatabaseError> {
-        sqlx::query_as!(Tag, "SELECT * FROM tags WHERE id = ?", id)
+    pub async fn get(conn: &mut PoolConnection<Sqlite>, id: i64) -> Result<DbTag, DatabaseError> {
+        sqlx::query_as!(DbTag, "SELECT * FROM tags WHERE id = ?", id)
             .fetch_optional(conn.deref_mut())
             .await
             .map(|t| t.ok_or(DatabaseError::NotFound))?

@@ -48,10 +45,10 @@ impl Tag {
     pub async fn get_by_tag_or_create(
         conn: &mut PoolConnection<Sqlite>,
         target_tag: &str,
-    ) -> Result<Tag, DatabaseError> {
-        match Tag::get_by_tag(conn, target_tag).await {
+    ) -> Result<DbTag, DatabaseError> {
+        match DbTag::get_by_tag(conn, target_tag).await {
             Ok(tag) => Ok(tag),
-            Err(DatabaseError::NotFound) => Tag::create(conn, target_tag).await,
+            Err(DatabaseError::NotFound) => DbTag::create(conn, target_tag).await,
             Err(e) => Err(e),
         }
     }

@@ -59,8 +56,8 @@ impl Tag {
     pub async fn get_by_tag(
         conn: &mut PoolConnection<Sqlite>,
         target_tag: &str,
-    ) -> Result<Tag, DatabaseError> {
-        sqlx::query_as!(Tag, "SELECT * FROM tags WHERE tag = ?", target_tag)
+    ) -> Result<DbTag, DatabaseError> {
+        sqlx::query_as!(DbTag, "SELECT * FROM tags WHERE tag = ?", target_tag)
            .fetch_optional(conn.deref_mut())
            .await
            .map(|t| t.ok_or(DatabaseError::NotFound))?

@@ -69,10 +66,10 @@ impl Tag {
     pub async fn link_relay(
         &self,
         conn: &mut PoolConnection<Sqlite>,
-        target_relay: &Relay,
-    ) -> Result<JunctionTag, DatabaseError> {
+        target_relay: &DbRelay,
+    ) -> Result<DbJunctionTag, DatabaseError> {
         sqlx::query_as!(
-            JunctionTag,
+            DbJunctionTag,
             "INSERT INTO junction_tag (tag_id, relay_id) VALUES (?, ?) RETURNING *",
             self.id,
             target_relay.id

@@ -85,10 +82,10 @@ impl Tag {
     pub async fn link_schedule(
         &self,
         conn: &mut PoolConnection<Sqlite>,
-        target_schedule: &Schedule,
-    ) -> Result<JunctionTag, DatabaseError> {
+        target_schedule: &DbSchedule,
+    ) -> Result<DbJunctionTag, DatabaseError> {
         sqlx::query_as!(
-            JunctionTag,
+            DbJunctionTag,
             "INSERT INTO junction_tag (tag_id, schedule_id) VALUES (?, ?) RETURNING *",
             self.id,
             target_schedule.id

@@ -1,68 +0,0 @@
-use serde::{Serialize, Serializer};
-use sqlx::{Decode, Encode, Sqlite, Type};
-use sqlx::database::HasArguments;
-use sqlx::encode::IsNull;
-use sqlx::error::BoxDynError;
-use sqlx::sqlite::{SqliteTypeInfo, SqliteValueRef};
-use uuid::Uuid;
-
-#[derive(Clone, Debug)]
-pub struct ControllerUid(Uuid);
-
-impl ControllerUid {
-    pub fn new() -> Self {
-        Self(Uuid::new_v4())
-    }
-}
-
-impl Serialize for ControllerUid {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        String::from(self).serialize(serializer)
-    }
-}
-
-impl From<&ControllerUid> for String {
-    fn from(uid: &ControllerUid) -> String {
-        uid.0.as_hyphenated().to_string()
-    }
-}
-
-impl Type<Sqlite> for ControllerUid {
-    fn type_info() -> SqliteTypeInfo {
-        <&[u8] as Type<Sqlite>>::type_info()
-    }
-
-    fn compatible(ty: &SqliteTypeInfo) -> bool {
-        <&[u8] as Type<Sqlite>>::compatible(ty)
-    }
-}
-
-impl<'q> Encode<'q, Sqlite> for ControllerUid {
-    //noinspection DuplicatedCode
-    fn encode_by_ref(&self, buf: &mut <Sqlite as HasArguments<'q>>::ArgumentBuffer) -> IsNull {
-        let uuid_val = self.0.as_bytes().to_vec();
-        <&Vec<u8> as Encode<Sqlite>>::encode(&uuid_val, buf)
-    }
-}
-
-impl<'r> Decode<'r, Sqlite> for ControllerUid {
-    //noinspection DuplicatedCode
-    fn decode(value: SqliteValueRef<'r>) -> Result<Self, BoxDynError> {
-        Ok(Self::from(<&[u8] as Decode<Sqlite>>::decode(value)?))
-    }
-}
-
-impl From<&[u8]> for ControllerUid {
-    fn from(value: &[u8]) -> Self {
-        Self(Uuid::from_slice(&value).unwrap())
-    }
-}
-
-impl From<Vec<u8>> for ControllerUid {
-    fn from(value: Vec<u8>) -> Self {
-        Self::from(value.as_slice())
-    }
-}

@@ -1,3 +1,3 @@
+pub mod relays;
 pub mod schedules;
 pub mod ws;
-pub mod relays;

@@ -1,13 +1,12 @@
-use actix_web::{delete, get, post, put, web, HttpResponse};
+use actix_web::{get, web, HttpResponse};
 use serde::{Deserialize, Serialize};
-use sqlx::pool::PoolConnection;
 use sqlx::{Pool, Sqlite};
 
-use crate::db::errors::DatabaseError;
-use crate::db::Relay;
-use crate::db::Tag;
+use crate::db::DbRelay;
 use crate::handlers::errors::ApiError;
-use crate::return_models::ReturnRelay;
+use crate::models::Relay;
 
 #[derive(Debug, Serialize, Deserialize)]
 pub struct RequestRelay {

@@ -19,10 +18,12 @@ pub struct RequestRelay {
 pub async fn index(pool: web::Data<Pool<Sqlite>>) -> Result<HttpResponse, ApiError> {
     let mut pool_conn = pool.acquire().await?;
 
-    let relays = Relay::get_all(&mut pool_conn).await?;
+    let relays = DbRelay::get_all(&mut pool_conn).await?;
 
-    let return_relays: Vec<ReturnRelay> =
-        relays.into_iter().map(|s| ReturnRelay::from_relay(s, &mut pool_conn)).collect();
+    let return_relays: Vec<Relay> = relays
+        .into_iter()
+        .map(|s| Relay::from_db_relay(s, &mut pool_conn))
+        .collect();
 
     Ok(HttpResponse::Ok().json(return_relays))
 }

@@ -4,16 +4,16 @@ use sqlx::pool::PoolConnection;
 use sqlx::{Pool, Sqlite};
 
 use crate::db::errors::DatabaseError;
-use crate::db::{Periods, Schedule};
-use crate::db::Tag;
-use crate::db::types::ScheduleUid;
+use crate::db::DbTag;
+use crate::db::{DbPeriods, DbSchedule};
 use crate::handlers::errors::ApiError;
-use crate::return_models::ReturnSchedule;
+use crate::models::Schedule;
+use crate::types::ScheduleUid;
 
 #[derive(Debug, Serialize, Deserialize)]
 pub struct RequestSchedule {
     name: String,
-    periods: Periods,
+    periods: DbPeriods,
     tags: Vec<String>,
 }
 

@@ -21,10 +21,12 @@ pub struct RequestSchedule {
 pub async fn index(pool: web::Data<Pool<Sqlite>>) -> Result<HttpResponse, ApiError> {
     let mut pool_conn = pool.acquire().await?;
 
-    let schedules = Schedule::get_all(&mut pool_conn).await?;
+    let schedules = DbSchedule::get_all(&mut pool_conn).await?;
 
-    let return_schedules: Vec<ReturnSchedule> =
-        schedules.into_iter().map(|s| ReturnSchedule::from_schedule(s, &mut pool_conn)).collect();
+    let return_schedules: Vec<Schedule> = schedules
+        .into_iter()
+        .map(|s| Schedule::from_schedule(s, &mut pool_conn))
+        .collect();
 
     Ok(HttpResponse::Ok().json(return_schedules))
 }

@@ -37,12 +39,14 @@ pub async fn tagged(
     let mut pool_conn = pool.acquire().await?;
 
     let (tag,) = path.into_inner();
-    let tag_db = Tag::get_by_tag(&mut pool_conn, &tag).await?;
+    let tag_db = DbTag::get_by_tag(&mut pool_conn, &tag).await?;
 
-    let schedules = Schedule::get_by_tag(&mut pool_conn, &tag_db).await?;
+    let schedules = DbSchedule::get_by_tag(&mut pool_conn, &tag_db).await?;
 
-    let return_schedules: Vec<ReturnSchedule> =
-        schedules.into_iter().map(|s| ReturnSchedule::from_schedule(s, &mut pool_conn)).collect();
+    let return_schedules: Vec<Schedule> = schedules
+        .into_iter()
+        .map(|s| Schedule::from_schedule(s, &mut pool_conn))
+        .collect();
 
     Ok(HttpResponse::Ok().json(return_schedules))
 }

@@ -57,9 +61,9 @@ pub async fn show(
     let (schedule_uid,) = path.into_inner();
     let uid = ScheduleUid::try_from(schedule_uid.as_str()).or(Err(ApiError::BadUid))?;
 
-    let schedule = Schedule::get_by_uid(&mut pool_conn, &uid).await?;
+    let schedule = DbSchedule::get_by_uid(&mut pool_conn, &uid).await?;
 
-    let return_schedule = ReturnSchedule::from_schedule(schedule, &mut pool_conn);
+    let return_schedule = Schedule::from_schedule(schedule, &mut pool_conn);
     Ok(HttpResponse::Ok().json(return_schedule))
 }
 

@@ -70,22 +74,22 @@ pub async fn add(
 ) -> Result<HttpResponse, ApiError> {
     let mut pool_conn = pool.acquire().await?;
 
-    let new_schedule = Schedule::create(&mut pool_conn, &data.name, &data.periods).await?;
+    let new_schedule = DbSchedule::create(&mut pool_conn, &data.name, &data.periods).await?;
 
     new_schedule
         .set_tags(&mut pool_conn, data.tags.as_slice())
         .await?;
 
-    let return_schedule = ReturnSchedule::from_schedule(new_schedule, &mut pool_conn);
+    let return_schedule = Schedule::from_schedule(new_schedule, &mut pool_conn);
     Ok(HttpResponse::Created().json(return_schedule))
 }
 
 async fn add_list_single(
     conn: &mut PoolConnection<Sqlite>,
     request_schedule: &RequestSchedule,
-) -> Result<Schedule, DatabaseError> {
+) -> Result<DbSchedule, DatabaseError> {
     let new_schedule =
-        Schedule::create(conn, &request_schedule.name, &request_schedule.periods).await?;
+        DbSchedule::create(conn, &request_schedule.name, &request_schedule.periods).await?;
 
     new_schedule
         .set_tags(conn, request_schedule.tags.as_slice())

@@ -101,7 +105,7 @@ pub async fn add_list(
 ) -> Result<HttpResponse, ApiError> {
     let mut pool_conn = pool.acquire().await?;
 
-    let result: Vec<Result<Schedule, DatabaseError>> = data
+    let result: Vec<Result<DbSchedule, DatabaseError>> = data
         .as_slice()
         .iter()
         .map(|request_schedule| {

@@ -109,10 +113,12 @@ pub async fn add_list(
         })
         .collect();
 
-    let mut return_schedules: Vec<ReturnSchedule> = Vec::new();
+    let mut return_schedules: Vec<Schedule> = Vec::new();
     for schedule in result {
         match schedule {
-            Ok(schedule) => return_schedules.push(ReturnSchedule::from_schedule(schedule, &mut pool_conn)),
+            Ok(schedule) => {
+                return_schedules.push(Schedule::from_schedule(schedule, &mut pool_conn))
+            }
             Err(e) => return Ok(HttpResponse::from(e)),
         }
     }

@@ -130,7 +136,7 @@ pub async fn update(
     let (schedule_uid,) = path.into_inner();
     let uid = ScheduleUid::try_from(schedule_uid.as_str()).or(Err(ApiError::BadUid))?;
 
-    let schedule = Schedule::get_by_uid(&mut pool_conn, &uid).await?;
+    let schedule = DbSchedule::get_by_uid(&mut pool_conn, &uid).await?;
 
     let schedule = schedule
         .update(&mut pool_conn, data.name.as_str(), &data.periods)

@@ -140,7 +146,7 @@ pub async fn update(
         .set_tags(&mut pool_conn, data.tags.as_slice())
         .await?;
 
-    let return_schedule = ReturnSchedule::from_schedule(schedule, &mut pool_conn);
+    let return_schedule = Schedule::from_schedule(schedule, &mut pool_conn);
     Ok(HttpResponse::Ok().json(return_schedule))
 }
 

@@ -158,7 +164,7 @@ pub async fn delete(
         ScheduleUid::Off => Err(ApiError::ProtectedSchedule),
         ScheduleUid::On => Err(ApiError::ProtectedSchedule),
         ScheduleUid::Any(_) => {
-            Schedule::delete_by_uid(&mut pool_conn, uid).await?;
+            DbSchedule::delete_by_uid(&mut pool_conn, uid).await?;
             Ok(HttpResponse::Ok().json("schedule got deleted"))
         }
     }

@@ -1,4 +1,4 @@
-use crate::db::Schedule;
+use crate::db::DbSchedule;
 use crate::handlers::errors::ApiError;
 use actix::{Actor, StreamHandler};
 use actix_web::{get, web, HttpRequest, HttpResponse};

@@ -15,10 +15,10 @@ impl Actor for ControllerWs {
     type Context = ws::WebsocketContext<Self>;
 }
 
-async fn get_schedules(pool: &mut Pool<Sqlite>) -> Result<Vec<Schedule>, ApiError> {
+async fn get_schedules(pool: &mut Pool<Sqlite>) -> Result<Vec<DbSchedule>, ApiError> {
     let mut pool_conn = pool.acquire().await?;
 
-    Ok(Schedule::get_all(&mut pool_conn).await?)
+    Ok(DbSchedule::get_all(&mut pool_conn).await?)
 }
 
 impl StreamHandler<Result<Message, ProtocolError>> for ControllerWs {

@@ -28,9 +28,9 @@ impl StreamHandler<Result<Message, ProtocolError>> for ControllerWs {
         match msg {
             Ok(Message::Ping(msg)) => ctx.pong(&msg),
             Ok(Message::Text(text)) => {
-                println!("Got text: {}", text.to_string());
+                println!("Got text: {}", text);
                 ctx.text(schedules_json)
-            },
+            }
             _ => {}
         }
     }

@@ -1,5 +1,6 @@
 pub mod constants;
 pub mod db;
 pub mod handlers;
-pub mod return_models;
+pub mod models;
+pub mod types;
 pub mod utils;

51 emgauwa-lib/src/models/mod.rs Normal file
@@ -0,0 +1,51 @@
+use crate::db;
+use futures::executor;
+use serde_derive::Serialize;
+use sqlx::pool::PoolConnection;
+use sqlx::Sqlite;
+
+#[derive(Serialize, Debug)]
+pub struct Schedule {
+    #[serde(flatten)]
+    pub schedule: db::DbSchedule,
+    pub tags: Vec<String>,
+}
+
+#[derive(Serialize, Debug)]
+pub struct Relay {
+    #[serde(flatten)]
+    pub relay: db::DbRelay,
+    pub controller: db::DbController,
+    pub tags: Vec<String>,
+}
+
+#[derive(Serialize, Debug)]
+pub struct Controller {
+    #[serde(flatten)]
+    pub controller: db::DbController,
+    pub relays: Vec<Relay>,
+}
+
+impl Schedule {
+    pub fn from_schedule(schedule: db::DbSchedule, conn: &mut PoolConnection<Sqlite>) -> Self {
+        let schedule = schedule.clone();
+        let tags = executor::block_on(schedule.get_tags(conn)).unwrap();
+
+        Schedule { schedule, tags }
+    }
+}
+
+impl Relay {
+    pub fn from_db_relay(relay: db::DbRelay, conn: &mut PoolConnection<Sqlite>) -> Self {
+        let relay = relay.clone();
+        let controller =
+            executor::block_on(db::DbController::get(conn, relay.controller_id)).unwrap();
+        let tags = executor::block_on(relay.get_tags(conn)).unwrap();
+
+        Relay {
+            relay,
+            controller,
+            tags,
+        }
+    }
+}

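Note: the new models module wraps the Db-prefixed rows for serialization (the row is inlined via #[serde(flatten)]) and resolves the related controller and tags with executor::block_on. A short sketch of how a caller turns database rows into these models, following the pattern used by the relays handler and the controller's main.rs elsewhere in this commit (the function name and the choice of DatabaseError as the error type are assumptions made for the sketch):

use emgauwa_lib::db::errors::DatabaseError;
use emgauwa_lib::db::DbRelay;
use emgauwa_lib::models;
use sqlx::pool::PoolConnection;
use sqlx::Sqlite;

// Hypothetical helper: load every relay row and wrap each one in the API model,
// which pulls in the owning controller and the relay's tags.
async fn list_relays(
    conn: &mut PoolConnection<Sqlite>,
) -> Result<Vec<models::Relay>, DatabaseError> {
    let db_relays = DbRelay::get_all(conn).await?;
    Ok(db_relays
        .into_iter()
        .map(|relay| models::Relay::from_db_relay(relay, conn))
        .collect())
}
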
@@ -1,50 +0,0 @@
-use crate::db::{Controller, Relay, Schedule};
-use futures::executor;
-use serde::Serialize;
-use sqlx::pool::PoolConnection;
-use sqlx::Sqlite;
-use crate::db::types::ControllerUid;
-
-#[derive(Debug, Serialize)]
-pub struct ReturnSchedule {
-    #[serde(flatten)]
-    pub schedule: Schedule,
-    pub tags: Vec<String>,
-}
-
-impl ReturnSchedule {
-    pub fn from_schedule(schedule: Schedule, conn: &mut PoolConnection<Sqlite>) -> Self {
-        let schedule = schedule.clone();
-        let tags = executor::block_on(schedule.get_tags(conn)).unwrap();
-
-        ReturnSchedule {
-            schedule,
-            tags,
-        }
-    }
-}
-
-#[derive(Debug, Serialize)]
-pub struct ReturnRelay {
-    #[serde(flatten)]
-    pub relay: Relay,
-    pub controller: Controller,
-    pub controller_id: ControllerUid,
-    pub tags: Vec<String>,
-}
-
-impl ReturnRelay {
-    pub fn from_relay(relay: Relay, conn: &mut PoolConnection<Sqlite>) -> Self {
-        let relay = relay.clone();
-        let controller = executor::block_on(Controller::get(conn, relay.controller_id)).unwrap();
-        let controller_uid = controller.uid.clone();
-        let tags = executor::block_on(relay.get_tags(conn)).unwrap();
-
-        ReturnRelay {
-            relay,
-            controller,
-            controller_id: controller_uid,
-            tags,
-        }
-    }
-}

68 emgauwa-lib/src/types/controller_uid.rs Normal file
@@ -0,0 +1,68 @@
+use serde::{Serialize, Serializer};
+use sqlx::database::HasArguments;
+use sqlx::encode::IsNull;
+use sqlx::error::BoxDynError;
+use sqlx::sqlite::{SqliteTypeInfo, SqliteValueRef};
+use sqlx::{Decode, Encode, Sqlite, Type};
+use uuid::Uuid;
+
+#[derive(Clone, Debug)]
+pub struct ControllerUid(Uuid);
+
+impl Default for ControllerUid {
+    fn default() -> Self {
+        Self(Uuid::new_v4())
+    }
+}
+
+impl Serialize for ControllerUid {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        String::from(self).serialize(serializer)
+    }
+}
+
+impl From<&ControllerUid> for String {
+    fn from(uid: &ControllerUid) -> String {
+        uid.0.as_hyphenated().to_string()
+    }
+}
+
+impl Type<Sqlite> for ControllerUid {
+    fn type_info() -> SqliteTypeInfo {
+        <&[u8] as Type<Sqlite>>::type_info()
+    }
+
+    fn compatible(ty: &SqliteTypeInfo) -> bool {
+        <&[u8] as Type<Sqlite>>::compatible(ty)
+    }
+}
+
+impl<'q> Encode<'q, Sqlite> for ControllerUid {
+    //noinspection DuplicatedCode
+    fn encode_by_ref(&self, buf: &mut <Sqlite as HasArguments<'q>>::ArgumentBuffer) -> IsNull {
+        let uuid_val = self.0.as_bytes().to_vec();
+        <&Vec<u8> as Encode<Sqlite>>::encode(&uuid_val, buf)
+    }
+}
+
+impl<'r> Decode<'r, Sqlite> for ControllerUid {
+    //noinspection DuplicatedCode
+    fn decode(value: SqliteValueRef<'r>) -> Result<Self, BoxDynError> {
+        Ok(Self::from(<&[u8] as Decode<Sqlite>>::decode(value)?))
+    }
+}
+
+impl From<&[u8]> for ControllerUid {
+    fn from(value: &[u8]) -> Self {
+        Self(Uuid::from_slice(value).unwrap())
+    }
+}
+
+impl From<Vec<u8>> for ControllerUid {
+    fn from(value: Vec<u8>) -> Self {
+        Self::from(value.as_slice())
+    }
+}

@@ -1,5 +1,5 @@
-mod schedule_uid;
 mod controller_uid;
+mod schedule_uid;
 
-pub use schedule_uid::ScheduleUid;
 pub use controller_uid::ControllerUid;
+pub use schedule_uid::ScheduleUid;

@@ -1,9 +1,8 @@
 pub fn load_settings<T>(config_name: &str, env_prefix: &str) -> T
 where
-    for<'de> T: serde::Deserialize<'de>
+    for<'de> T: serde::Deserialize<'de>,
 {
-    let default_file = config::File::with_name(&format!("emgauwa-{}", config_name))
-        .required(false);
+    let default_file = config::File::with_name(&format!("emgauwa-{}", config_name)).required(false);
 
     config::Config::builder()
         .add_source(default_file)