Add trait for db_model to model conversion

This commit is contained in:
Tobias Reisinger 2023-11-27 15:09:14 +01:00
parent 8dab4b9a50
commit cb47dcda5c
Signed by: serguzim
GPG key ID: 13AD60C237A28DFE
5 changed files with 92 additions and 59 deletions

View file

@ -3,6 +3,7 @@ use std::str;
use crate::relay_loop::run_relay_loop; use crate::relay_loop::run_relay_loop;
use crate::settings::Settings; use crate::settings::Settings;
use emgauwa_lib::db::{DbController, DbRelay}; use emgauwa_lib::db::{DbController, DbRelay};
use emgauwa_lib::models::convert_db_list;
use emgauwa_lib::types::ControllerUid; use emgauwa_lib::types::ControllerUid;
use emgauwa_lib::{db, models}; use emgauwa_lib::{db, models};
use futures::channel::mpsc; use futures::channel::mpsc;
@ -88,10 +89,7 @@ async fn main() {
.await .await
.unwrap(); .unwrap();
let relays = db_relays let relays = convert_db_list(&mut conn, db_relays).expect("Failed to convert relays");
.into_iter()
.map(|relay| models::Relay::from_db_relay(relay, &mut conn))
.collect();
let this = models::Controller { let this = models::Controller {
controller: db_controller, controller: db_controller,

View file

@ -1,8 +1,15 @@
use crate::settings::Settings; use crate::settings::Settings;
use chrono::Local; use chrono::Local;
use std::time::Duration;
use tokio::time;
#[allow(unused_variables)] #[allow(unused_variables)]
pub async fn run_relay_loop(settings: Settings) { pub async fn run_relay_loop(settings: Settings) {
let next_timestamp = Local::now().naive_local(); let default_duration = Duration::from_millis(1000);
loop {} loop {
// naivetime timestamp for now
let next_timestamp = Local::now().naive_local().time() + default_duration;
time::sleep(default_duration).await;
println!("Relay loop: {}", next_timestamp)
}
} }

View file

@ -6,7 +6,7 @@ use sqlx::{Pool, Sqlite};
use crate::db::DbRelay; use crate::db::DbRelay;
use crate::handlers::errors::ApiError; use crate::handlers::errors::ApiError;
use crate::models::Relay; use crate::models::{convert_db_list, Relay};
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub struct RequestRelay { pub struct RequestRelay {
@ -18,14 +18,11 @@ pub struct RequestRelay {
pub async fn index(pool: web::Data<Pool<Sqlite>>) -> Result<HttpResponse, ApiError> { pub async fn index(pool: web::Data<Pool<Sqlite>>) -> Result<HttpResponse, ApiError> {
let mut pool_conn = pool.acquire().await?; let mut pool_conn = pool.acquire().await?;
let relays = DbRelay::get_all(&mut pool_conn).await?; let db_relays = DbRelay::get_all(&mut pool_conn).await?;
let return_relays: Vec<Relay> = relays let relays: Vec<Relay> = convert_db_list(&mut pool_conn, db_relays)?;
.into_iter()
.map(|s| Relay::from_db_relay(s, &mut pool_conn))
.collect();
Ok(HttpResponse::Ok().json(return_relays)) Ok(HttpResponse::Ok().json(relays))
} }
//#[get("/api/v1/tags/tag/{tag}")] //#[get("/api/v1/tags/tag/{tag}")]

View file

@ -7,7 +7,7 @@ use crate::db::errors::DatabaseError;
use crate::db::DbTag; use crate::db::DbTag;
use crate::db::{DbPeriods, DbSchedule}; use crate::db::{DbPeriods, DbSchedule};
use crate::handlers::errors::ApiError; use crate::handlers::errors::ApiError;
use crate::models::Schedule; use crate::models::{convert_db_list, FromDbModel, Schedule};
use crate::types::ScheduleUid; use crate::types::ScheduleUid;
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
@ -21,14 +21,11 @@ pub struct RequestSchedule {
pub async fn index(pool: web::Data<Pool<Sqlite>>) -> Result<HttpResponse, ApiError> { pub async fn index(pool: web::Data<Pool<Sqlite>>) -> Result<HttpResponse, ApiError> {
let mut pool_conn = pool.acquire().await?; let mut pool_conn = pool.acquire().await?;
let schedules = DbSchedule::get_all(&mut pool_conn).await?; let db_schedules = DbSchedule::get_all(&mut pool_conn).await?;
let return_schedules: Vec<Schedule> = schedules let schedules: Vec<Schedule> = convert_db_list(&mut pool_conn, db_schedules)?;
.into_iter()
.map(|s| Schedule::from_schedule(s, &mut pool_conn))
.collect();
Ok(HttpResponse::Ok().json(return_schedules)) Ok(HttpResponse::Ok().json(schedules))
} }
#[get("/api/v1/schedules/tag/{tag}")] #[get("/api/v1/schedules/tag/{tag}")]
@ -43,14 +40,11 @@ pub async fn tagged(
.await? .await?
.ok_or(DatabaseError::NotFound)?; .ok_or(DatabaseError::NotFound)?;
let schedules = DbSchedule::get_by_tag(&mut pool_conn, &tag_db).await?; let db_schedules = DbSchedule::get_by_tag(&mut pool_conn, &tag_db).await?;
let return_schedules: Vec<Schedule> = schedules let schedules: Vec<Schedule> = convert_db_list(&mut pool_conn, db_schedules)?;
.into_iter()
.map(|s| Schedule::from_schedule(s, &mut pool_conn))
.collect();
Ok(HttpResponse::Ok().json(return_schedules)) Ok(HttpResponse::Ok().json(schedules))
} }
#[get("/api/v1/schedules/{schedule_id}")] #[get("/api/v1/schedules/{schedule_id}")]
@ -67,7 +61,7 @@ pub async fn show(
.await? .await?
.ok_or(DatabaseError::NotFound)?; .ok_or(DatabaseError::NotFound)?;
let return_schedule = Schedule::from_schedule(schedule, &mut pool_conn); let return_schedule = Schedule::from_db_model(&mut pool_conn, schedule);
Ok(HttpResponse::Ok().json(return_schedule)) Ok(HttpResponse::Ok().json(return_schedule))
} }
@ -84,7 +78,7 @@ pub async fn add(
.set_tags(&mut pool_conn, data.tags.as_slice()) .set_tags(&mut pool_conn, data.tags.as_slice())
.await?; .await?;
let return_schedule = Schedule::from_schedule(new_schedule, &mut pool_conn); let return_schedule = Schedule::from_db_model(&mut pool_conn, new_schedule);
Ok(HttpResponse::Created().json(return_schedule)) Ok(HttpResponse::Created().json(return_schedule))
} }
@ -109,24 +103,20 @@ pub async fn add_list(
) -> Result<HttpResponse, ApiError> { ) -> Result<HttpResponse, ApiError> {
let mut pool_conn = pool.acquire().await?; let mut pool_conn = pool.acquire().await?;
let result: Vec<Result<DbSchedule, DatabaseError>> = data let mut db_schedules: Vec<DbSchedule> = Vec::new();
.as_slice() data.iter().try_for_each(|s| {
.iter() let new_s = futures::executor::block_on(add_list_single(&mut pool_conn, s));
.map(|request_schedule| { match new_s {
futures::executor::block_on(add_list_single(&mut pool_conn, request_schedule)) Ok(new_s) => {
}) db_schedules.push(new_s);
.collect(); Ok(())
}
Err(e) => Err(e),
}
})?;
let mut return_schedules: Vec<Schedule> = Vec::new(); let schedules: Vec<Schedule> = convert_db_list(&mut pool_conn, db_schedules)?;
for schedule in result { Ok(HttpResponse::Created().json(schedules))
match schedule {
Ok(schedule) => {
return_schedules.push(Schedule::from_schedule(schedule, &mut pool_conn))
}
Err(e) => return Ok(HttpResponse::from(e)),
}
}
Ok(HttpResponse::Created().json(return_schedules))
} }
#[put("/api/v1/schedules/{schedule_id}")] #[put("/api/v1/schedules/{schedule_id}")]
@ -152,7 +142,7 @@ pub async fn update(
.set_tags(&mut pool_conn, data.tags.as_slice()) .set_tags(&mut pool_conn, data.tags.as_slice())
.await?; .await?;
let return_schedule = Schedule::from_schedule(schedule, &mut pool_conn); let return_schedule = Schedule::from_db_model(&mut pool_conn, schedule);
Ok(HttpResponse::Ok().json(return_schedule)) Ok(HttpResponse::Ok().json(return_schedule))
} }

View file

@ -1,20 +1,33 @@
use crate::db; use crate::db;
use crate::db::errors::DatabaseError;
use crate::db::{DbRelay, DbSchedule};
use futures::executor; use futures::executor;
use serde_derive::Serialize; use serde_derive::Serialize;
use sqlx::pool::PoolConnection; use sqlx::pool::PoolConnection;
use sqlx::Sqlite; use sqlx::Sqlite;
/// Conversion from a raw database row type into its richer API model.
///
/// Implementors name their source row type via [`FromDbModel::DbModel`] and
/// build the model in `from_db_model`, which may issue follow-up queries on
/// `conn` (the visible impls load tags/controllers this way) — hence the
/// connection argument and the fallible return.
pub trait FromDbModel {
    /// The database row type this model is built from. `Clone` is required
    /// because implementors keep the row while deriving extra fields from it.
    type DbModel: Clone;

    /// Build the model from `db_model`, using `conn` for any additional
    /// lookups.
    ///
    /// # Errors
    /// Returns a `DatabaseError` if a follow-up query fails.
    fn from_db_model(
        conn: &mut PoolConnection<Sqlite>,
        db_model: Self::DbModel,
    ) -> Result<Self, DatabaseError>
    where
        Self: Sized;
}
#[derive(Serialize, Debug)] #[derive(Serialize, Debug)]
pub struct Schedule { pub struct Schedule {
#[serde(flatten)] #[serde(flatten)]
pub schedule: db::DbSchedule, pub schedule: DbSchedule,
pub tags: Vec<String>, pub tags: Vec<String>,
} }
#[derive(Serialize, Debug)] #[derive(Serialize, Debug)]
pub struct Relay { pub struct Relay {
#[serde(flatten)] #[serde(flatten)]
pub relay: db::DbRelay, pub relay: DbRelay,
pub controller: db::DbController, pub controller: db::DbController,
pub tags: Vec<String>, pub tags: Vec<String>,
} }
@ -26,25 +39,53 @@ pub struct Controller {
pub relays: Vec<Relay>, pub relays: Vec<Relay>,
} }
impl Schedule { impl FromDbModel for Schedule {
pub fn from_schedule(schedule: db::DbSchedule, conn: &mut PoolConnection<Sqlite>) -> Self { type DbModel = DbSchedule;
let schedule = schedule.clone();
let tags = executor::block_on(schedule.get_tags(conn)).unwrap();
Schedule { schedule, tags } fn from_db_model(
conn: &mut PoolConnection<Sqlite>,
db_model: Self::DbModel,
) -> Result<Self, DatabaseError> {
let schedule = db_model.clone();
let tags = executor::block_on(schedule.get_tags(conn))?;
Ok(Schedule { schedule, tags })
} }
} }
impl Relay { impl FromDbModel for Relay {
pub fn from_db_relay(relay: db::DbRelay, conn: &mut PoolConnection<Sqlite>) -> Self { type DbModel = DbRelay;
let relay = relay.clone();
let controller = executor::block_on(relay.get_controller(conn)).unwrap();
let tags = executor::block_on(relay.get_tags(conn)).unwrap();
Relay { fn from_db_model(
conn: &mut PoolConnection<Sqlite>,
db_model: Self::DbModel,
) -> Result<Self, DatabaseError> {
let relay = db_model.clone();
let controller = executor::block_on(relay.get_controller(conn))?;
let tags = executor::block_on(relay.get_tags(conn))?;
Ok(Relay {
relay, relay,
controller, controller,
tags, tags,
} })
} }
} }
/// Convert a list of database rows into their API models.
///
/// Conversion stops at the first failure and returns that error, so the
/// returned `Vec` is either complete or absent — never partial. This mirrors
/// the short-circuiting behavior of the original manual loop.
///
/// # Errors
/// Propagates the first `DatabaseError` produced by `T::from_db_model`.
pub fn convert_db_list<T: FromDbModel>(
    conn: &mut PoolConnection<Sqlite>,
    db_models: Vec<T::DbModel>,
) -> Result<Vec<T>, DatabaseError> {
    // Collecting an iterator of `Result`s into `Result<Vec<_>, _>`
    // short-circuits on the first `Err`, replacing the manual accumulator,
    // `try_for_each`, and the `match` that only re-wrapped Ok/Err.
    db_models
        .into_iter()
        .map(|db_model| T::from_db_model(conn, db_model))
        .collect()
}