From 483fd60daaa629d4a731ae1639bd5df9dcefede5 Mon Sep 17 00:00:00 2001
From: Tobias Reisinger
Date: Fri, 5 Nov 2021 16:32:30 +0100
Subject: [PATCH] Add saving periods

---
 .gitignore                               |   1 +
 Cargo.lock                               | Bin 47245 -> 48272 bytes
 Cargo.toml                               |   1 +
 migrations/2021-10-13-000000_init/up.sql |   6 +-
 src/db.rs                                |  41 ++++++-----
 src/db/errors.rs                         |  16 ++++-
 src/db/models.rs                         |  87 +++++++++++++++++++++--
 src/db/schema.rs                         |   2 +-
 src/handlers/errors.rs                   |  34 +++++++++
 src/handlers/mod.rs                      |   1 +
 src/handlers/v1/schedules.rs             |  51 +++++++++++--
 src/main.rs                              |  14 ++--
 src/{db => }/types.rs                    |  19 ++---
 13 files changed, 224 insertions(+), 49 deletions(-)
 create mode 100644 src/handlers/errors.rs
 rename src/{db => }/types.rs (88%)

diff --git a/.gitignore b/.gitignore
index 6f575be..d4d55a3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,3 +9,4 @@ emgauwa-core.sqlite

 # Added by cargo
 /target
+/api.http
diff --git a/Cargo.lock b/Cargo.lock
index a9b38132ae7a6ad894b667276e5559baa6b73b8f..37f426231aaf9ca4591d40c47ac8ba520b71c1c5 100644
GIT binary patch
delta 549
zcmZ9Izlsz=5QlNaaa}n)5!Pc(#^q4abEvNVFUXq72I6^vfgspjU2`|V?ZWJ8E|>~J
zn;Q56BFq!`2tI%hU}$LO+=6##R=^7FU3+|W=crvSmebK2Zdc86Q9dlEP^n7zd~WCI
[remainder of the binary delta truncated in the source]
diff --git a/Cargo.toml b/Cargo.toml
--- a/Cargo.toml
+++ b/Cargo.toml
 [dependencies]
 actix-web = "3"
+chrono = { version = "0.4", features = ["serde"] }
 diesel = { version = "1.4", features = ["sqlite", "uuid"] }
 diesel_migrations = "1.4"
 dotenv = "0.15"
diff --git a/migrations/2021-10-13-000000_init/up.sql b/migrations/2021-10-13-000000_init/up.sql
index 0159598..6ac0492 100644
--- a/migrations/2021-10-13-000000_init/up.sql
+++ b/migrations/2021-10-13-000000_init/up.sql
@@ -41,13 +41,13 @@ CREATE TABLE schedules
         UNIQUE,
     name VARCHAR(128) NOT NULL,
-    periods TEXT
+    periods BLOB NOT NULL
 );

 --INSERT INTO schedules (uid, name, periods) VALUES (x'6f666600000000000000000000000000', 'off', x'00');
 --INSERT INTO schedules (uid, name, periods) VALUES (x'6f6e0000000000000000000000000000', 'on', x'010000009F05');
-INSERT INTO schedules (uid, name, periods) VALUES (x'00', 'off', '00');
-INSERT INTO schedules (uid, name, periods) VALUES (x'01', 'on', '010000009F05');
+INSERT INTO schedules (uid, name, periods) VALUES (x'00', 'off', x'');
+INSERT INTO schedules (uid, name, periods) VALUES (x'01', 'on', x'0000173B');

 CREATE TABLE tags
 (
diff --git a/src/db.rs b/src/db.rs
index c3b4c2c..263b0b1 100644
--- a/src/db.rs
+++ b/src/db.rs
@@ -1,20 +1,18 @@
+use std::env;
+
+use diesel::dsl::sql;
+use diesel::prelude::*;
+use diesel_migrations::embed_migrations;
+use dotenv::dotenv;
+
+use errors::DatabaseError;
+use models::*;
+use schema::schedules::dsl::*;
+use crate::types::EmgauwaUid;
+
 pub mod errors;
 pub mod models;
 pub mod schema;
-mod types;
-
-use diesel::prelude::*;
-
-use diesel::dsl::sql;
-use dotenv::dotenv;
-use std::env;
-
-use models::*;
-use schema::schedules::dsl::*;
-
-use diesel_migrations::embed_migrations;
-use errors::DatabaseError;
-use types::EmgauwaUid;

 embed_migrations!("migrations");

@@ -34,18 +32,27 @@ pub fn run_migrations() {
 pub fn get_schedules() -> Vec<Schedule> {
     let connection = get_connection();
     schedules
-        .limit(5)
         .load::<Schedule>(&connection)
         .expect("Error loading schedules")
 }

-pub fn create_schedule(new_name: &str) -> Result<Schedule, DatabaseError> {
+pub fn get_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<Schedule, DatabaseError> {
+    let connection = get_connection();
+    let result = schedules
+        .filter(uid.eq(filter_uid))
+        .first::<Schedule>(&connection)
+        .or(Err(DatabaseError::NotFound))?;
+
+    Ok(result)
+}
+
+pub fn create_schedule(new_name: &str, new_periods: &Periods) -> Result<Schedule, DatabaseError> {
     let connection = get_connection();

     let new_schedule = NewSchedule {
         uid: &EmgauwaUid::default(),
         name: new_name,
-        periods: "",
+        periods: new_periods
     };

     diesel::insert_into(schedules)
diff --git a/src/db/errors.rs b/src/db/errors.rs
index edf7414..ca2a8cd 100644
--- a/src/db/errors.rs
+++ b/src/db/errors.rs
@@ -4,6 +4,16 @@ use serde::{Serialize, Serializer};
 pub enum DatabaseError {
     InsertError,
     InsertGetError,
+    NotFound,
+}
+
+impl DatabaseError {
+    fn to_code(&self) -> u32 {
+        match self {
+            DatabaseError::NotFound => 404,
+            _ => 500
+        }
+    }
 }

 impl Serialize for DatabaseError {
@@ -11,8 +21,9 @@ impl Serialize for DatabaseError {
     where
         S: Serializer,
     {
-        let mut s = serializer.serialize_struct("error", 2)?;
-        s.serialize_field("code", &500)?;
+        let mut s = serializer.serialize_struct("error", 3)?;
+        s.serialize_field("type", "database-error")?;
+        s.serialize_field("code", &self.to_code())?;
         s.serialize_field("description", &String::from(self))?;
         s.end()
     }
@@ -25,6 +36,7 @@ impl From<&DatabaseError> for String {
             DatabaseError::InsertGetError => {
                 String::from("error retrieving new entry from database (your entry was saved)")
             }
+            DatabaseError::NotFound => String::from("model was not found in database")
         }
     }
 }
diff --git a/src/db/models.rs b/src/db/models.rs
index 4f61870..71d6afa 100644
--- a/src/db/models.rs
+++ b/src/db/models.rs
@@ -1,14 +1,24 @@
-use super::types::EmgauwaUid;
-use serde::Serialize;
+use chrono::{NaiveTime, Timelike};
+use diesel::backend::Backend;
+use diesel::deserialize::FromSql;
+use diesel::serialize::{IsNull, Output, ToSql};
+use diesel::sql_types::Binary;
+use diesel::sqlite::Sqlite;
+use diesel::{deserialize, serialize};
+use serde::{Deserialize, Serialize};
+use std::io::Write;

 use super::schema::schedules;
+use crate::types::EmgauwaUid;

 #[derive(Serialize, Queryable)]
 pub struct Schedule {
+    #[serde(skip)]
     pub id: i32,
+    #[serde(alias = "id")]
     pub uid: EmgauwaUid,
     pub name: String,
-    pub periods: String,
+    pub periods: Periods,
 }

 #[derive(Insertable)]
@@ -16,5 +26,74 @@ pub struct Schedule {
 pub struct NewSchedule<'a> {
     pub uid: &'a EmgauwaUid,
     pub name: &'a str,
-    pub periods: &'a str,
+    pub periods: &'a Periods,
+}
+
+#[derive(Debug, Serialize, Deserialize, AsExpression, FromSqlRow, PartialEq, Clone)]
+#[sql_type = "Binary"]
+pub struct Period {
+    #[serde(with = "period_format")]
+    pub start: NaiveTime,
+    #[serde(with = "period_format")]
+    pub end: NaiveTime,
+}
+
+#[derive(Debug, Serialize, Deserialize, AsExpression, FromSqlRow, PartialEq, Clone)]
+#[sql_type = "Binary"]
+pub struct Periods(pub(crate) Vec<Period>);
+
+mod period_format {
+    use chrono::NaiveTime;
+    use serde::{self, Deserialize, Deserializer, Serializer};
+
+    const FORMAT: &'static str = "%H:%M";
+
+    pub fn serialize<S>(time: &NaiveTime, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        let s = format!("{}", time.format(FORMAT));
+        serializer.serialize_str(&s)
+    }
+
+    pub fn deserialize<'de, D>(deserializer: D) -> Result<NaiveTime, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        let s = String::deserialize(deserializer)?;
+        NaiveTime::parse_from_str(&s, FORMAT).map_err(serde::de::Error::custom)
+    }
+}
+
+impl ToSql<Binary, Sqlite> for Periods {
+    fn to_sql<W: Write>(&self, out: &mut Output<W, Sqlite>) -> serialize::Result {
+        for period in self.0.iter() {
+            out.write_all(&[
+                period.start.hour() as u8,
+                period.start.minute() as u8,
+                period.end.hour() as u8,
+                period.end.minute() as u8,
+            ])?;
+        }
+        Ok(IsNull::No)
+    }
+}
+
+impl FromSql<Binary, Sqlite> for Periods {
+    fn from_sql(bytes: Option<&<Sqlite as Backend>::RawValue>) -> deserialize::Result<Self> {
+        let blob = bytes.unwrap().read_blob();
+
+        let mut vec = Vec::new();
+        for i in (3..blob.len()).step_by(4) {
+            let start_val_h: u32 = blob[i - 3] as u32;
+            let start_val_m: u32 = blob[i - 2] as u32;
+            let end_val_h: u32 = blob[i - 1] as u32;
+            let end_val_m: u32 = blob[i - 0] as u32;
+            vec.push(Period {
+                start: NaiveTime::from_hms(start_val_h, start_val_m, 0),
+                end: NaiveTime::from_hms(end_val_h, end_val_m, 0),
+            });
+        }
+        Ok(Periods(vec))
+    }
 }
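The two trait impls above define the on-disk layout of the new BLOB column: ToSql flattens every Period into four bytes (start hour, start minute, end hour, end minute) and FromSql walks the blob in four-byte steps to rebuild the vector. The following round-trip sketch is an editor's illustration, not part of the patch; it uses plain NaiveTime tuples instead of the Period struct so it runs with chrono alone, and its byte values line up with the seed rows in the migration above (x'0000173B' for "on", an empty blob for "off").

// Editor's sketch of the 4-bytes-per-period encoding used by ToSql/FromSql above.
use chrono::{NaiveTime, Timelike};

fn pack(periods: &[(NaiveTime, NaiveTime)]) -> Vec<u8> {
    let mut blob = Vec::with_capacity(periods.len() * 4);
    for (start, end) in periods {
        // one period = [start hour, start minute, end hour, end minute]
        blob.extend_from_slice(&[
            start.hour() as u8,
            start.minute() as u8,
            end.hour() as u8,
            end.minute() as u8,
        ]);
    }
    blob
}

fn unpack(blob: &[u8]) -> Vec<(NaiveTime, NaiveTime)> {
    blob.chunks_exact(4)
        .map(|chunk| {
            (
                NaiveTime::from_hms(chunk[0] as u32, chunk[1] as u32, 0),
                NaiveTime::from_hms(chunk[2] as u32, chunk[3] as u32, 0),
            )
        })
        .collect()
}

fn main() {
    // The seeded "on" schedule stores x'0000173B': one period from 00:00 to 23:59.
    let on = vec![(NaiveTime::from_hms(0, 0, 0), NaiveTime::from_hms(23, 59, 0))];
    let blob = pack(&on);
    assert_eq!(blob, vec![0x00, 0x00, 0x17, 0x3B]);
    assert_eq!(unpack(&blob), on);

    // The seeded "off" schedule stores an empty blob: no periods at all.
    assert_eq!(pack(&[]), Vec::<u8>::new());
}

Seconds never survive the round trip because only hours and minutes are stored, which matches the %H:%M wire format used by period_format.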
diff --git a/src/db/schema.rs b/src/db/schema.rs
index 50e04a1..340996b 100644
--- a/src/db/schema.rs
+++ b/src/db/schema.rs
@@ -60,7 +60,7 @@ table! {
         id -> Integer,
         uid -> Binary,
         name -> Text,
-        periods -> Text,
+        periods -> Binary,
     }
 }
diff --git a/src/handlers/errors.rs b/src/handlers/errors.rs
new file mode 100644
index 0000000..b79e10e
--- /dev/null
+++ b/src/handlers/errors.rs
@@ -0,0 +1,34 @@
+use serde::ser::SerializeStruct;
+use serde::{Serialize, Serializer};
+
+pub enum HandlerError {
+    BadUidError,
+}
+
+impl HandlerError {
+    fn to_code(&self) -> u32 {
+        match self {
+            HandlerError::BadUidError => 400
+        }
+    }
+}
+
+impl Serialize for HandlerError {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        let mut s = serializer.serialize_struct("error", 2)?;
+        s.serialize_field("code", &self.to_code())?;
+        s.serialize_field("description", &String::from(self))?;
+        s.end()
+    }
+}
+
+impl From<&HandlerError> for String {
+    fn from(err: &HandlerError) -> Self {
+        match err {
+            HandlerError::BadUidError => String::from("the uid is in a bad format"),
+        }
+    }
+}
diff --git a/src/handlers/mod.rs b/src/handlers/mod.rs
index a3a6d96..628befb 100644
--- a/src/handlers/mod.rs
+++ b/src/handlers/mod.rs
@@ -1 +1,2 @@
 pub mod v1;
+mod errors;
diff --git a/src/handlers/v1/schedules.rs b/src/handlers/v1/schedules.rs
index ee3042c..06fcb9f 100644
--- a/src/handlers/v1/schedules.rs
+++ b/src/handlers/v1/schedules.rs
@@ -1,17 +1,58 @@
+use std::str::FromStr;
+use actix_web::{HttpResponse, Responder, web, get};
+use serde::{Serialize, Deserialize};
+use uuid::Uuid;
+
 use crate::db;
-use actix_web::{HttpResponse, Responder};
+use crate::db::models::Periods;
+use crate::handlers::errors::HandlerError;
+use crate::types::EmgauwaUid;
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct RequestSchedule {
+    name: String,
+    periods: Periods,
+}

 pub async fn index() -> impl Responder {
     let schedules = db::get_schedules();
     HttpResponse::Ok().json(schedules)
 }

-pub async fn get() -> impl Responder {
-    "hello from get schedules by id"
+#[get("/api/v1/schedules/{schedule_id}")]
+pub async fn show(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Responder {
+
+    let emgauwa_uid = match schedule_uid.as_str() {
+        "on" => Ok(EmgauwaUid::On),
+        "off" => Ok(EmgauwaUid::Off),
+        any => match Uuid::from_str(any) {
+            Ok(uuid) => Ok(EmgauwaUid::Any(uuid)),
+            Err(_) => Err(HandlerError::BadUidError)
+        }
+    };
+
+    match emgauwa_uid {
+        Ok(uid) => {
+            let schedule = db::get_schedule_by_uid(uid);
+            match schedule {
+                Ok(ok) => HttpResponse::Ok().json(ok),
+                Err(err) => HttpResponse::NotFound().json(err),
+            }
+        },
+        Err(err) => HttpResponse::BadRequest().json(err)
+    }
+
 }

-pub async fn add() -> impl Responder {
-    let new_schedule = db::create_schedule("TEST");
+pub async fn add(post: web::Json<RequestSchedule>) -> impl Responder {
+
+    println!("model: {:?}", post);
+
+    for period in post.periods.0.iter() {
+        println!("start: {:?}; end: {:?}", period.start, period.end);
+    }
+
+    let new_schedule = db::create_schedule(&post.name, &post.periods);

     match new_schedule {
         Ok(ok) => HttpResponse::Ok().json(ok),
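For orientation, this is roughly what the reworked handlers exchange once the patch is applied. The values below are made up by the editor; only the field layout follows RequestSchedule, the %H:%M period format, the serde attributes on Schedule (id skipped, uid rendered through String::from) and the two error serializers:

    POST /api/v1/schedules
    {"name": "workday", "periods": [{"start": "07:30", "end": "09:00"}]}
    200 OK
    {"uid": "<generated uuid, hyphenated>", "name": "workday", "periods": [{"start": "07:30", "end": "09:00"}]}

    GET /api/v1/schedules/off
    200 OK
    {"uid": "off", "name": "off", "periods": []}

    GET /api/v1/schedules/not-a-uuid
    400 Bad Request
    {"code": 400, "description": "the uid is in a bad format"}

    GET /api/v1/schedules/<unknown uuid>
    404 Not Found
    {"type": "database-error", "code": 404, "description": "model was not found in database"}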
diff --git a/src/main.rs b/src/main.rs
index cdd5ca1..65f8cb0 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,13 +1,14 @@
-mod db;
-mod handlers;
-
 #[macro_use]
 extern crate diesel;
 #[macro_use]
 extern crate diesel_migrations;
 extern crate dotenv;

-use actix_web::{web, App, HttpServer};
+use actix_web::{App, HttpServer, web};
+
+mod db;
+mod handlers;
+mod types;

 #[actix_web::main]
 async fn main() -> std::io::Result<()> {
@@ -23,10 +24,7 @@ async fn main() -> std::io::Result<()> {
                 "/api/v1/schedules",
                 web::post().to(handlers::v1::schedules::add),
             )
-            .route(
-                "/api/v1/schedules/{id}",
-                web::get().to(handlers::v1::schedules::get),
-            )
+            .service(handlers::v1::schedules::show)
             .route(
                 "/api/v1/schedules/{id}",
                 web::delete().to(handlers::v1::schedules::delete),
diff --git a/src/db/types.rs b/src/types.rs
similarity index 88%
rename from src/db/types.rs
rename to src/types.rs
index 8e43cbc..2d7ade2 100644
--- a/src/db/types.rs
+++ b/src/types.rs
@@ -1,3 +1,6 @@
+use std::fmt::{Debug, Formatter};
+use std::io::Write;
+
 use diesel::backend::Backend;
 use diesel::deserialize::FromSql;
 use diesel::serialize::{IsNull, Output, ToSql};
@@ -5,8 +8,6 @@ use diesel::sql_types::Binary;
 use diesel::sqlite::Sqlite;
 use diesel::{deserialize, serialize};
 use serde::{Serialize, Serializer};
-use std::fmt::{Debug, Formatter};
-use std::io::Write;
 use uuid::Uuid;

 #[derive(AsExpression, FromSqlRow, PartialEq, Clone)]
@@ -22,6 +23,7 @@ impl Default for EmgauwaUid {
         EmgauwaUid::Any(Uuid::new_v4())
     }
 }
+
 impl Debug for EmgauwaUid {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         match self {
@@ -31,6 +33,7 @@ impl Debug for EmgauwaUid {
         }
     }
 }
+
 impl ToSql<Binary, Sqlite> for EmgauwaUid {
     fn to_sql<W: Write>(&self, out: &mut Output<W, Sqlite>) -> serialize::Result {
         match self {
@@ -41,6 +44,7 @@ impl ToSql<Binary, Sqlite> for EmgauwaUid {
         Ok(IsNull::No)
     }
 }
+
 impl FromSql<Binary, Sqlite> for EmgauwaUid {
     fn from_sql(bytes: Option<&<Sqlite as Backend>::RawValue>) -> deserialize::Result<Self> {
         match bytes {
@@ -59,13 +63,10 @@ impl Serialize for EmgauwaUid {
     where
         S: Serializer,
     {
-        match self {
-            EmgauwaUid::On => "off".serialize(serializer),
-            EmgauwaUid::Off => "on".serialize(serializer),
-            EmgauwaUid::Any(value) => value.serialize(serializer),
-        }
+        String::from(self).serialize(serializer)
     }
 }
+
 impl From<Uuid> for EmgauwaUid {
     fn from(uid: Uuid) -> EmgauwaUid {
         match uid.as_u128() {
@@ -89,8 +90,8 @@ impl From<&EmgauwaUid> for Uuid {
 impl From<&EmgauwaUid> for String {
     fn from(emgauwa_uid: &EmgauwaUid) -> String {
         match emgauwa_uid {
-            EmgauwaUid::On => String::from("off"),
-            EmgauwaUid::Off => String::from("on"),
+            EmgauwaUid::Off => String::from("off"),
+            EmgauwaUid::On => String::from("on"),
             EmgauwaUid::Any(value) => value.to_hyphenated().to_string(),
         }
     }
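The rename of types.rs to the crate root comes with a behavioural fix: Serialize now goes through From<&EmgauwaUid> for String, and that impl returns "off" for Off and "on" for On instead of the previously swapped strings. A small self-contained sketch of the resulting mapping between path/JSON strings and uid variants, mirroring the match in the show handler (editor's illustration, not part of the patch; the UUID value is arbitrary and the uuid 0.8 API is assumed, as used by the patch):

// Editor's sketch: the three shapes a schedule uid can take and how the string
// form used in URLs and JSON maps onto them.
use std::str::FromStr;
use uuid::Uuid;

#[derive(Debug, PartialEq)]
enum Uid {
    Off,       // reserved schedule "off", seeded with uid x'00' in the migration
    On,        // reserved schedule "on", seeded with uid x'01'
    Any(Uuid), // every other schedule, rendered as a hyphenated UUID
}

fn parse(s: &str) -> Option<Uid> {
    match s {
        "off" => Some(Uid::Off),
        "on" => Some(Uid::On),
        other => Uuid::from_str(other).ok().map(Uid::Any),
    }
}

fn render(uid: &Uid) -> String {
    match uid {
        Uid::Off => String::from("off"),
        Uid::On => String::from("on"),
        Uid::Any(value) => value.to_hyphenated().to_string(),
    }
}

fn main() {
    for s in ["off", "on", "f4b1c9a0-9d6b-4f35-a2d5-86c427d807f3"] {
        let uid = parse(s).expect("should be a valid uid");
        // every accepted path segment renders back to the same string
        assert_eq!(render(&uid), s);
    }
    // anything else is rejected; the show handler answers this with a 400 BadUidError
    assert!(parse("not-a-uuid").is_none());
}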