Add tags for schedules

Tobias Reisinger, 2022-04-03 01:35:51 +02:00
parent f3f3d36eed
commit 75f8afd624
10 changed files with 375 additions and 184 deletions


@@ -1,30 +1,43 @@
 CREATE TABLE controllers
 (
-    id INTEGER
+    id
+        INTEGER
         PRIMARY KEY
         AUTOINCREMENT
         NOT NULL,
-    uid VARCHAR(36)
+    uid
+        VARCHAR(36)
         NOT NULL
         UNIQUE,
-    name VARCHAR(128),
-    ip VARCHAR(16),
-    port INTEGER,
-    relay_count INTEGER,
-    active BOOLEAN
+    name
+        VARCHAR(128)
+        NOT NULL,
+    ip
+        VARCHAR(16),
+    port
+        INTEGER,
+    relay_count
+        INTEGER,
+    active
+        BOOLEAN
         NOT NULL
 );

 CREATE TABLE relays
 (
-    id INTEGER
+    id
+        INTEGER
         PRIMARY KEY
         AUTOINCREMENT
         NOT NULL,
-    name VARCHAR(128),
-    number INTEGER
+    name
+        VARCHAR(128)
+        NOT NULL,
+    number
+        INTEGER
         NOT NULL,
-    controller_id INTEGER
+    controller_id
+        INTEGER
         NOT NULL
         REFERENCES controllers (id)
         ON DELETE CASCADE
@@ -32,16 +45,20 @@ CREATE TABLE relays
 CREATE TABLE schedules
 (
-    id INTEGER
+    id
+        INTEGER
         PRIMARY KEY
         AUTOINCREMENT
         NOT NULL,
-    uid BLOB
+    uid
+        BLOB
         NOT NULL
         UNIQUE,
-    name VARCHAR(128)
+    name
+        VARCHAR(128)
         NOT NULL,
-    periods BLOB
+    periods
+        BLOB
         NOT NULL
 );

 INSERT INTO schedules (uid, name, periods) VALUES (x'00', 'off', x'');
@@ -49,45 +66,55 @@ INSERT INTO schedules (uid, name, periods) VALUES (x'01', 'on', x'00000000');
 CREATE TABLE tags
 (
-    id INTEGER
+    id
+        INTEGER
         PRIMARY KEY
         AUTOINCREMENT
         NOT NULL,
-    tag VARCHAR(128)
+    tag
+        VARCHAR(128)
         NOT NULL
         UNIQUE
 );

 CREATE TABLE junction_tag
 (
-    id INTEGER
+    id
+        INTEGER
         PRIMARY KEY
         AUTOINCREMENT
         NOT NULL,
-    tag_id INTEGER
+    tag_id
+        INTEGER
         NOT NULL
         REFERENCES tags (id)
         ON DELETE CASCADE,
-    relay_id INTEGER
+    relay_id
+        INTEGER
         REFERENCES relays (id)
         ON DELETE CASCADE,
-    schedule_id INTEGER
+    schedule_id
+        INTEGER
         REFERENCES schedules (id)
         ON DELETE CASCADE
 );

 CREATE TABLE junction_relay_schedule
 (
-    id INTEGER
+    id
+        INTEGER
         PRIMARY KEY
         AUTOINCREMENT
         NOT NULL,
-    weekday SMALLINT
+    weekday
+        SMALLINT
         NOT NULL,
-    relay_id INTEGER
+    relay_id
+        INTEGER
         REFERENCES relays (id)
         ON DELETE CASCADE,
-    schedule_id INTEGER
+    schedule_id
+        INTEGER
         DEFAULT 1
         REFERENCES schedules (id)
         ON DELETE SET DEFAULT
@@ -95,32 +122,43 @@ CREATE TABLE junction_relay_schedule
 CREATE TABLE macros
 (
-    id INTEGER
+    id
+        INTEGER
         PRIMARY KEY
         AUTOINCREMENT
         NOT NULL,
-    uid VARCHAR(36)
+    uid
+        VARCHAR(36)
         NOT NULL
         UNIQUE,
-    name VARCHAR(128)
+    name
+        VARCHAR(128)
+        NOT NULL
 );

 CREATE TABLE macro_actions
 (
-    id INTEGER
+    id
+        INTEGER
         PRIMARY KEY
         AUTOINCREMENT
         NOT NULL,
-    macro_id INTEGER
+    macro_id
+        INTEGER
         NOT NULL
         REFERENCES macros (id)
         ON DELETE CASCADE,
-    relay_id INTEGER
+    relay_id
+        INTEGER
+        NOT NULL
         REFERENCES relays (id)
         ON DELETE CASCADE,
-    schedule_id INTEGER
+    schedule_id
+        INTEGER
+        NOT NULL
         REFERENCES schedules (id)
         ON DELETE CASCADE,
-    weekday SMALLINT
+    weekday
+        SMALLINT
         NOT NULL
 );


@@ -1,18 +1,15 @@
 use std::env;

-use diesel::dsl::sql;
 use diesel::prelude::*;
 use diesel_migrations::embed_migrations;
 use dotenv::dotenv;

-use crate::types::EmgauwaUid;
-use errors::DatabaseError;
-use models::*;
-use schema::schedules::dsl::*;

 pub mod errors;
 pub mod models;
 pub mod schema;
+pub mod schedule;
+pub mod tag;
 mod model_utils;

 embed_migrations!("migrations");
@@ -29,62 +26,3 @@ pub fn run_migrations() {
     let connection = get_connection();
     embedded_migrations::run(&connection).expect("Failed to run migrations.");
 }
-
-pub fn get_schedules() -> Vec<Schedule> {
-    let connection = get_connection();
-    schedules
-        .load::<Schedule>(&connection)
-        .expect("Error loading schedules")
-}
-
-pub fn get_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<Schedule, DatabaseError> {
-    let connection = get_connection();
-    let result = schedules
-        .filter(uid.eq(filter_uid))
-        .first::<Schedule>(&connection)
-        .or(Err(DatabaseError::NotFound))?;
-
-    Ok(result)
-}
-
-pub fn delete_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<(), DatabaseError> {
-    let filter_uid = match filter_uid {
-        EmgauwaUid::Off => Err(DatabaseError::Protected),
-        EmgauwaUid::On => Err(DatabaseError::Protected),
-        EmgauwaUid::Any(_) => Ok(filter_uid)
-    }?;
-
-    let connection = get_connection();
-    match diesel::delete(schedules.filter(uid.eq(filter_uid))).execute(&connection) {
-        Ok(rows) => {
-            if rows != 0 {
-                Ok(())
-            } else {
-                Err(DatabaseError::DeleteError)
-            }
-        }
-        Err(_) => Err(DatabaseError::DeleteError),
-    }
-}
-
-pub fn create_schedule(new_name: &str, new_periods: &Periods) -> Result<Schedule, DatabaseError> {
-    let connection = get_connection();
-
-    let new_schedule = NewSchedule {
-        uid: &EmgauwaUid::default(),
-        name: new_name,
-        periods: new_periods,
-    };
-
-    diesel::insert_into(schedules)
-        .values(&new_schedule)
-        .execute(&connection)
-        .or(Err(DatabaseError::InsertError))?;
-
-    let result = schedules
-        .find(sql("last_insert_rowid()"))
-        .get_result::<Schedule>(&connection)
-        .or(Err(DatabaseError::InsertGetError))?;
-
-    Ok(result)
-}


@@ -3,12 +3,13 @@ use actix_web::http::StatusCode;
 use serde::ser::SerializeStruct;
 use serde::{Serialize, Serializer};

+#[derive(Debug)]
 pub enum DatabaseError {
     DeleteError,
-    InsertError,
+    InsertError(diesel::result::Error),
     InsertGetError,
     NotFound,
-    Protected
+    Protected,
 }

 impl DatabaseError {
@@ -37,7 +38,7 @@ impl Serialize for DatabaseError {
 impl From<&DatabaseError> for String {
     fn from(err: &DatabaseError) -> Self {
         match err {
-            DatabaseError::InsertError => String::from("error on inserting into database"),
+            DatabaseError::InsertError(_) => String::from("error on inserting into database"),
             DatabaseError::InsertGetError => {
                 String::from("error on retrieving new entry from database (your entry was saved)")
             }
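Since InsertError now wraps the underlying diesel::result::Error, callers can surface the driver error instead of only the generic message. A minimal sketch of that option, not something this commit does itself (the demo function, its logging, and the periods argument are assumptions):

use crate::db::errors::DatabaseError;
use crate::db::models::Periods;
use crate::db::schedule::create_schedule;

fn demo(periods: &Periods) {
    match create_schedule("demo", periods) {
        Ok(schedule) => println!("created schedule {}", schedule.name),
        // the wrapped diesel error is now available for logging or inspection
        Err(DatabaseError::InsertError(err)) => eprintln!("insert failed: {}", err),
        Err(other) => eprintln!("database error: {}", String::from(&other)),
    }
}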


@@ -2,10 +2,17 @@ use diesel::sql_types::Binary;
 use serde::{Deserialize, Serialize};

 use crate::db::model_utils::Period;
-use super::schema::schedules;
+use super::schema::*;
 use crate::types::EmgauwaUid;

-#[derive(Serialize, Queryable)]
+#[derive(Debug, Serialize, Identifiable, Queryable)]
+pub struct Relay {
+    #[serde(skip)]
+    pub id: i32,
+    // TODO
+}
+
+#[derive(Debug, Serialize, Identifiable, Queryable)]
 pub struct Schedule {
     #[serde(skip)]
     pub id: i32,
@@ -27,7 +34,7 @@ pub struct NewSchedule<'a> {
 #[sql_type = "Binary"]
 pub struct Periods(pub(crate) Vec<Period>);

-#[derive(Serialize, Queryable)]
+#[derive(Debug, Serialize, Identifiable, Queryable, Clone)]
 pub struct Tag {
     pub id: i32,
     pub tag: String,
@@ -39,9 +46,22 @@ pub struct NewTag<'a> {
     pub tag: &'a str,
 }

-#[derive(Insertable)]
-#[table_name = "junction_tag_schedule"]
-pub struct NewJunctionTagSchedule<'a> {
+#[derive(Queryable, Associations, Identifiable)]
+#[belongs_to(Relay)]
+#[belongs_to(Schedule)]
+#[belongs_to(Tag)]
+#[table_name = "junction_tag"]
+pub struct JunctionTag {
+    pub id: i32,
     pub tag_id: i32,
-    pub schedule_id: i32,
+    pub relay_id: Option<i32>,
+    pub schedule_id: Option<i32>,
+}
+
+#[derive(Insertable)]
+#[table_name = "junction_tag"]
+pub struct NewJunctionTag {
+    pub tag_id: i32,
+    pub relay_id: Option<i32>,
+    pub schedule_id: Option<i32>,
 }
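These derives are what make the tag lookups elsewhere in the commit possible: Identifiable on Schedule, Relay and Tag plus Associations and belongs_to on JunctionTag let Diesel build the junction query. A rough sketch of the pattern, mirroring get_schedule_tags in src/db/schedule.rs (the helper name and the SqliteConnection parameter are assumptions):

use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use crate::db::models::{JunctionTag, Schedule};

// Load all junction_tag rows that point at the given schedule.
fn junctions_for(schedule: &Schedule, connection: &SqliteConnection) -> Vec<JunctionTag> {
    JunctionTag::belonging_to(schedule)
        .load::<JunctionTag>(connection)
        .expect("Error loading junction_tag rows")
}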

src/db/schedule.rs (new file)

@@ -0,0 +1,109 @@
use diesel::dsl::sql;
use diesel::prelude::*;

use crate::types::EmgauwaUid;

use crate::db::errors::DatabaseError;
use crate::db::{get_connection, schema};
use crate::db::models::*;
use crate::db::schema::tags::dsl::tags;
use crate::db::schema::junction_tag::dsl::junction_tag;
use crate::db::schema::schedules::dsl::schedules;
use crate::db::tag::{create_junction_tag, create_tag};

pub fn get_schedule_tags(schedule: &Schedule) -> Vec<String> {
    let connection = get_connection();
    JunctionTag::belonging_to(schedule)
        .inner_join(schema::tags::dsl::tags)
        .select(schema::tags::tag)
        .load::<String>(&connection)
        .expect("Error loading tags")
}

pub fn get_schedules() -> Vec<Schedule> {
    let connection = get_connection();
    schedules
        .load::<Schedule>(&connection)
        .expect("Error loading schedules")
}

pub fn get_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<Schedule, DatabaseError> {
    let connection = get_connection();
    let result = schedules
        .filter(schema::schedules::uid.eq(filter_uid))
        .first::<Schedule>(&connection)
        .or(Err(DatabaseError::NotFound))?;

    Ok(result)
}

pub fn delete_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<(), DatabaseError> {
    let filter_uid = match filter_uid {
        EmgauwaUid::Off => Err(DatabaseError::Protected),
        EmgauwaUid::On => Err(DatabaseError::Protected),
        EmgauwaUid::Any(_) => Ok(filter_uid)
    }?;

    let connection = get_connection();
    match diesel::delete(schedules.filter(schema::schedules::uid.eq(filter_uid))).execute(&connection) {
        Ok(rows) => {
            if rows != 0 {
                Ok(())
            } else {
                Err(DatabaseError::DeleteError)
            }
        }
        Err(_) => Err(DatabaseError::DeleteError),
    }
}

pub fn create_schedule(new_name: &str, new_periods: &Periods) -> Result<Schedule, DatabaseError> {
    let connection = get_connection();

    let new_schedule = NewSchedule {
        uid: &EmgauwaUid::default(),
        name: new_name,
        periods: new_periods,
    };

    diesel::insert_into(schedules)
        .values(&new_schedule)
        .execute(&connection)
        .map_err(DatabaseError::InsertError)?;

    let result = schedules
        .find(sql("last_insert_rowid()"))
        .get_result::<Schedule>(&connection)
        .or(Err(DatabaseError::InsertGetError))?;

    Ok(result)
}

pub fn set_schedule_tags(schedule: &Schedule, new_tags: &[String]) -> Result<(), DatabaseError> {
    let connection = get_connection();
    diesel::delete(junction_tag.filter(schema::junction_tag::schedule_id.eq(schedule.id)))
        .execute(&connection)
        .or(Err(DatabaseError::DeleteError))?;

    let mut database_tags: Vec<Tag> = tags.filter(schema::tags::tag.eq_any(new_tags))
        .load::<Tag>(&connection)
        .expect("Error loading tags");

    let mut database_tags_iter = database_tags.clone().into_iter().map(|tag_db| tag_db.tag);

    // create missing tags
    for new_tag in new_tags {
        if !database_tags_iter.any(|t| t.eq(new_tag)) {
            database_tags.push(
                create_tag(new_tag).expect("Error inserting tag")
            );
        }
    }

    for database_tag in database_tags {
        create_junction_tag(database_tag, None, Some(schedule))
            .expect("Error saving junction between tag and schedule");
    }

    Ok(())
}
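Taken together, a hedged sketch of the flow these functions enable (the demo function and the Periods value are assumptions; nothing in the commit calls them in this exact order):

use crate::db::models::Periods;
use crate::db::schedule::{create_schedule, get_schedule_tags, set_schedule_tags};

fn demo(periods: &Periods) {
    // create a schedule, attach two tags, then read the tags back
    let schedule = create_schedule("evening lights", periods).expect("insert failed");
    set_schedule_tags(&schedule, &["garden".to_string(), "lights".to_string()])
        .expect("tagging failed");
    println!("{:?}", get_schedule_tags(&schedule)); // e.g. ["garden", "lights"]
}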


@@ -2,7 +2,7 @@ table! {
     controllers (id) {
         id -> Integer,
         uid -> Text,
-        name -> Nullable<Text>,
+        name -> Text,
         ip -> Nullable<Text>,
         port -> Nullable<Integer>,
         relay_count -> Nullable<Integer>,
@@ -32,8 +32,8 @@
     macro_actions (id) {
         id -> Integer,
         macro_id -> Integer,
-        relay_id -> Nullable<Integer>,
-        schedule_id -> Nullable<Integer>,
+        relay_id -> Integer,
+        schedule_id -> Integer,
         weekday -> SmallInt,
     }
 }
@@ -42,14 +42,14 @@
     macros (id) {
         id -> Integer,
         uid -> Text,
-        name -> Nullable<Text>,
+        name -> Text,
     }
 }

 table! {
     relays (id) {
         id -> Integer,
-        name -> Nullable<Text>,
+        name -> Text,
         number -> Integer,
         controller_id -> Integer,
     }
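Because Nullable<Text> became Text for these name columns, Queryable structs that read them map the field as String rather than Option<String>. For illustration only, a hypothetical full relay model (the Relay struct in this commit is still a stub):

#[derive(Queryable)]
pub struct RelayFull {
    pub id: i32,
    pub name: String, // was Option<String> while relays.name was nullable
    pub number: i32,
    pub controller_id: i32,
}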

src/db/tag.rs (new file)

@@ -0,0 +1,52 @@
use diesel::dsl::sql;
use diesel::prelude::*;

use crate::db::errors::DatabaseError;
use crate::db::{get_connection};
use crate::db::models::*;
use crate::db::schema::tags::dsl::tags;
use crate::db::schema::junction_tag::dsl::junction_tag;

pub fn create_tag(new_tag: &str) -> Result<Tag, DatabaseError> {
    let connection = get_connection();

    let new_tag = NewTag {
        tag: new_tag,
    };

    diesel::insert_into(tags)
        .values(&new_tag)
        .execute(&connection)
        .map_err(DatabaseError::InsertError)?;

    let result = tags
        .find(sql("last_insert_rowid()"))
        .get_result::<Tag>(&connection)
        .or(Err(DatabaseError::InsertGetError))?;

    Ok(result)
}

pub fn create_junction_tag(target_tag: Tag, target_relay: Option<&Relay>, target_schedule: Option<&Schedule>) -> Result<JunctionTag, DatabaseError> {
    let connection = get_connection();

    let new_junction_tag = NewJunctionTag {
        relay_id: target_relay.map(|r| r.id),
        schedule_id: target_schedule.map(|s| s.id),
        tag_id: target_tag.id
    };

    diesel::insert_into(junction_tag)
        .values(&new_junction_tag)
        .execute(&connection)
        .map_err(DatabaseError::InsertError)?;

    let result = junction_tag
        .find(sql("last_insert_rowid()"))
        .get_result::<JunctionTag>(&connection)
        .or(Err(DatabaseError::InsertGetError))?;

    Ok(result)
}
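create_junction_tag covers both link targets through its Option parameters, but only the schedule side is exercised by this commit. A hedged sketch of tagging a relay instead (the tag_relay helper is an assumption and nothing calls it yet; it presumes a Relay loaded elsewhere):

use crate::db::models::{JunctionTag, Relay};
use crate::db::tag::{create_junction_tag, create_tag};

fn tag_relay(relay: &Relay) -> JunctionTag {
    // create the tag, then link it to the relay instead of a schedule
    let tag = create_tag("garden").expect("Error inserting tag");
    create_junction_tag(tag, Some(relay), None)
        .expect("Error saving junction between tag and relay")
}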


@@ -2,20 +2,23 @@ use std::convert::TryFrom;
 use actix_web::{HttpResponse, Responder, web, get, delete};
 use serde::{Serialize, Deserialize};

-use crate::db;
 use crate::db::models::Periods;
+use crate::db::schedule::*;
 use crate::handlers::errors::HandlerError;
+use crate::return_models::ReturnSchedule;
 use crate::types::EmgauwaUid;

 #[derive(Debug, Serialize, Deserialize)]
 pub struct RequestSchedule {
     name: String,
     periods: Periods,
+    tags: Vec<String>,
 }

 pub async fn index() -> impl Responder {
-    let schedules = db::get_schedules();
-    HttpResponse::Ok().json(schedules)
+    let schedules = get_schedules();
+    let return_schedules: Vec<ReturnSchedule> = schedules.into_iter().map(ReturnSchedule::from).collect();
+    HttpResponse::Ok().json(return_schedules)
 }

 #[get("/api/v1/schedules/{schedule_id}")]
@@ -25,9 +28,9 @@ pub async fn show(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Resp

     match emgauwa_uid {
         Ok(uid) => {
-            let schedule = db::get_schedule_by_uid(uid);
+            let schedule = get_schedule_by_uid(uid);
             match schedule {
-                Ok(ok) => HttpResponse::Ok().json(ok),
+                Ok(ok) => HttpResponse::Ok().json(ReturnSchedule::from(ok)),
                 Err(err) => HttpResponse::from(err),
             }
         },
@@ -36,12 +39,19 @@ pub async fn show(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Resp
 }

 pub async fn add(post: web::Json<RequestSchedule>) -> impl Responder {
-    let new_schedule = db::create_schedule(&post.name, &post.periods);
+    let new_schedule = create_schedule(&post.name, &post.periods);

-    match new_schedule {
-        Ok(ok) => HttpResponse::Created().json(ok),
-        Err(err) => HttpResponse::from(err),
+    if new_schedule.is_err() {
+        return HttpResponse::from(new_schedule.unwrap_err())
     }
+    let new_schedule = new_schedule.unwrap();
+
+    let result = set_schedule_tags(&new_schedule, post.tags.as_slice());
+    if result.is_err() {
+        return HttpResponse::from(result.unwrap_err());
+    }
+
+    HttpResponse::Created().json(ReturnSchedule::from(new_schedule))
 }

 #[delete("/api/v1/schedules/{schedule_id}")]
@@ -53,7 +63,7 @@ pub async fn delete(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Re
         EmgauwaUid::Off => HttpResponse::from(HandlerError::ProtectedSchedule),
         EmgauwaUid::On => HttpResponse::from(HandlerError::ProtectedSchedule),
         EmgauwaUid::Any(_) => {
-            match db::delete_schedule_by_uid(uid) {
+            match delete_schedule_by_uid(uid) {
                 Ok(_) => HttpResponse::Ok().json("schedule got deleted"),
                 Err(err) => HttpResponse::from(err)
             }
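With the tags field added to RequestSchedule, a create request has to send a tag list alongside name and periods. A hypothetical request body for the add handler, sketched with serde_json (how Periods serializes is not shown in this diff, so the empty periods array is only a placeholder):

// e.g. built in a test or client; serde_json is assumed to be available
let body = serde_json::json!({
    "name": "evening lights",
    "periods": [],
    "tags": ["garden", "lights"]
});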


@@ -3,6 +3,7 @@ extern crate diesel;
 #[macro_use]
 extern crate diesel_migrations;
 extern crate dotenv;
+extern crate core;

 use actix_web::{middleware, web, App, HttpServer};
 use actix_web::middleware::normalize::TrailingSlash;
@@ -10,6 +11,7 @@ use env_logger::{Builder, Env};

 mod db;
 mod handlers;
+mod return_models;
 mod types;

 #[actix_web::main]

src/return_models.rs (new file)

@@ -0,0 +1,21 @@
use serde::{Serialize};

use crate::db::models::Schedule;
use crate::db::schedule::get_schedule_tags;

#[derive(Debug, Serialize)]
pub struct ReturnSchedule {
    #[serde(flatten)]
    pub schedule: Schedule,
    pub tags: Vec<String>,
}

impl From<Schedule> for ReturnSchedule {
    fn from(schedule: Schedule) -> Self {
        let tags: Vec<String> = get_schedule_tags(&schedule);
        ReturnSchedule {
            schedule,
            tags,
        }
    }
}
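Because of #[serde(flatten)] on schedule and #[serde(skip)] on the schedule's id, the JSON the handlers return is the schedule's own fields with a tags array alongside, roughly {"uid": "...", "name": "...", "periods": [...], "tags": ["garden"]}. A small sketch of serializing one directly (the to_json helper is an assumption and serde_json is presumed available; the handlers go through actix's .json() instead):

use crate::db::models::Schedule;
use crate::return_models::ReturnSchedule;

fn to_json(schedule: Schedule) -> String {
    let return_schedule = ReturnSchedule::from(schedule); // also loads the schedule's tags
    serde_json::to_string(&return_schedule).expect("serialization failed")
}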