Add tags for schedules

Tobias Reisinger 2022-04-03 01:35:51 +02:00
parent f3f3d36eed
commit 75f8afd624
10 changed files with 375 additions and 184 deletions

View file

@@ -1,126 +1,164 @@
CREATE TABLE controllers
(
-    id INTEGER
-        PRIMARY KEY
-        AUTOINCREMENT
-        NOT NULL,
-    uid VARCHAR(36)
-        NOT NULL
-        UNIQUE,
-    name VARCHAR(128),
-    ip VARCHAR(16),
-    port INTEGER,
-    relay_count INTEGER,
-    active BOOLEAN
-        NOT NULL
+    id
+        INTEGER
+        PRIMARY KEY
+        AUTOINCREMENT
+        NOT NULL,
+    uid
+        VARCHAR(36)
+        NOT NULL
+        UNIQUE,
+    name
+        VARCHAR(128)
+        NOT NULL,
+    ip
+        VARCHAR(16),
+    port
+        INTEGER,
+    relay_count
+        INTEGER,
+    active
+        BOOLEAN
+        NOT NULL
);

CREATE TABLE relays
(
-    id INTEGER
-        PRIMARY KEY
-        AUTOINCREMENT
-        NOT NULL,
-    name VARCHAR(128),
-    number INTEGER
-        NOT NULL,
-    controller_id INTEGER
-        NOT NULL
-        REFERENCES controllers (id)
-        ON DELETE CASCADE
+    id
+        INTEGER
+        PRIMARY KEY
+        AUTOINCREMENT
+        NOT NULL,
+    name
+        VARCHAR(128)
+        NOT NULL,
+    number
+        INTEGER
+        NOT NULL,
+    controller_id
+        INTEGER
+        NOT NULL
+        REFERENCES controllers (id)
+        ON DELETE CASCADE
);

CREATE TABLE schedules
(
-    id INTEGER
-        PRIMARY KEY
-        AUTOINCREMENT
-        NOT NULL,
-    uid BLOB
-        NOT NULL
-        UNIQUE,
-    name VARCHAR(128)
-        NOT NULL,
-    periods BLOB
-        NOT NULL
+    id
+        INTEGER
+        PRIMARY KEY
+        AUTOINCREMENT
+        NOT NULL,
+    uid
+        BLOB
+        NOT NULL
+        UNIQUE,
+    name
+        VARCHAR(128)
+        NOT NULL,
+    periods
+        BLOB
+        NOT NULL
);

INSERT INTO schedules (uid, name, periods) VALUES (x'00', 'off', x'');
INSERT INTO schedules (uid, name, periods) VALUES (x'01', 'on', x'00000000');

CREATE TABLE tags
(
-    id INTEGER
-        PRIMARY KEY
-        AUTOINCREMENT
-        NOT NULL,
-    tag VARCHAR(128)
-        NOT NULL
-        UNIQUE
+    id
+        INTEGER
+        PRIMARY KEY
+        AUTOINCREMENT
+        NOT NULL,
+    tag
+        VARCHAR(128)
+        NOT NULL
+        UNIQUE
);

CREATE TABLE junction_tag
(
-    id INTEGER
-        PRIMARY KEY
-        AUTOINCREMENT
-        NOT NULL,
-    tag_id INTEGER
-        NOT NULL
-        REFERENCES tags (id)
-        ON DELETE CASCADE,
-    relay_id INTEGER
-        REFERENCES relays (id)
-        ON DELETE CASCADE,
-    schedule_id INTEGER
-        REFERENCES schedules (id)
-        ON DELETE CASCADE
+    id
+        INTEGER
+        PRIMARY KEY
+        AUTOINCREMENT
+        NOT NULL,
+    tag_id
+        INTEGER
+        NOT NULL
+        REFERENCES tags (id)
+        ON DELETE CASCADE,
+    relay_id
+        INTEGER
+        REFERENCES relays (id)
+        ON DELETE CASCADE,
+    schedule_id
+        INTEGER
+        REFERENCES schedules (id)
+        ON DELETE CASCADE
);

CREATE TABLE junction_relay_schedule
(
-    id INTEGER
-        PRIMARY KEY
-        AUTOINCREMENT
-        NOT NULL,
-    weekday SMALLINT
-        NOT NULL,
-    relay_id INTEGER
-        REFERENCES relays (id)
-        ON DELETE CASCADE,
-    schedule_id INTEGER
-        DEFAULT 1
-        REFERENCES schedules (id)
-        ON DELETE SET DEFAULT
+    id
+        INTEGER
+        PRIMARY KEY
+        AUTOINCREMENT
+        NOT NULL,
+    weekday
+        SMALLINT
+        NOT NULL,
+    relay_id
+        INTEGER
+        REFERENCES relays (id)
+        ON DELETE CASCADE,
+    schedule_id
+        INTEGER
+        DEFAULT 1
+        REFERENCES schedules (id)
+        ON DELETE SET DEFAULT
);

CREATE TABLE macros
(
-    id INTEGER
-        PRIMARY KEY
-        AUTOINCREMENT
-        NOT NULL,
-    uid VARCHAR(36)
-        NOT NULL
-        UNIQUE,
-    name VARCHAR(128)
+    id
+        INTEGER
+        PRIMARY KEY
+        AUTOINCREMENT
+        NOT NULL,
+    uid
+        VARCHAR(36)
+        NOT NULL
+        UNIQUE,
+    name
+        VARCHAR(128)
+        NOT NULL
);

CREATE TABLE macro_actions
(
-    id INTEGER
-        PRIMARY KEY
-        AUTOINCREMENT
-        NOT NULL,
-    macro_id INTEGER
-        NOT NULL
-        REFERENCES macros (id)
-        ON DELETE CASCADE,
-    relay_id INTEGER
-        REFERENCES relays (id)
-        ON DELETE CASCADE,
-    schedule_id INTEGER
-        REFERENCES schedules (id)
-        ON DELETE CASCADE,
-    weekday SMALLINT
-        NOT NULL
+    id
+        INTEGER
+        PRIMARY KEY
+        AUTOINCREMENT
+        NOT NULL,
+    macro_id
+        INTEGER
+        NOT NULL
+        REFERENCES macros (id)
+        ON DELETE CASCADE,
+    relay_id
+        INTEGER
+        NOT NULL
+        REFERENCES relays (id)
+        ON DELETE CASCADE,
+    schedule_id
+        INTEGER
+        NOT NULL
+        REFERENCES schedules (id)
+        ON DELETE CASCADE,
+    weekday
+        SMALLINT
+        NOT NULL
);

View file

@@ -1,18 +1,15 @@
use std::env;

-use diesel::dsl::sql;
use diesel::prelude::*;
use diesel_migrations::embed_migrations;
use dotenv::dotenv;

-use crate::types::EmgauwaUid;
-use errors::DatabaseError;
-use models::*;
-use schema::schedules::dsl::*;
-
pub mod errors;
pub mod models;
pub mod schema;
+pub mod schedule;
+pub mod tag;

mod model_utils;

embed_migrations!("migrations");
@@ -29,62 +26,3 @@ pub fn run_migrations() {
    let connection = get_connection();
    embedded_migrations::run(&connection).expect("Failed to run migrations.");
}
-
-pub fn get_schedules() -> Vec<Schedule> {
-    let connection = get_connection();
-    schedules
-        .load::<Schedule>(&connection)
-        .expect("Error loading schedules")
-}
-
-pub fn get_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<Schedule, DatabaseError> {
-    let connection = get_connection();
-    let result = schedules
-        .filter(uid.eq(filter_uid))
-        .first::<Schedule>(&connection)
-        .or(Err(DatabaseError::NotFound))?;
-
-    Ok(result)
-}
-
-pub fn delete_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<(), DatabaseError> {
-    let filter_uid = match filter_uid {
-        EmgauwaUid::Off => Err(DatabaseError::Protected),
-        EmgauwaUid::On => Err(DatabaseError::Protected),
-        EmgauwaUid::Any(_) => Ok(filter_uid)
-    }?;
-
-    let connection = get_connection();
-    match diesel::delete(schedules.filter(uid.eq(filter_uid))).execute(&connection) {
-        Ok(rows) => {
-            if rows != 0 {
-                Ok(())
-            } else {
-                Err(DatabaseError::DeleteError)
-            }
-        }
-        Err(_) => Err(DatabaseError::DeleteError),
-    }
-}
-
-pub fn create_schedule(new_name: &str, new_periods: &Periods) -> Result<Schedule, DatabaseError> {
-    let connection = get_connection();
-
-    let new_schedule = NewSchedule {
-        uid: &EmgauwaUid::default(),
-        name: new_name,
-        periods: new_periods,
-    };
-
-    diesel::insert_into(schedules)
-        .values(&new_schedule)
-        .execute(&connection)
-        .or(Err(DatabaseError::InsertError))?;
-
-    let result = schedules
-        .find(sql("last_insert_rowid()"))
-        .get_result::<Schedule>(&connection)
-        .or(Err(DatabaseError::InsertGetError))?;
-
-    Ok(result)
-}

View file

@@ -3,12 +3,13 @@ use actix_web::http::StatusCode;
use serde::ser::SerializeStruct;
use serde::{Serialize, Serializer};

#[derive(Debug)]
pub enum DatabaseError {
    DeleteError,
-    InsertError,
+    InsertError(diesel::result::Error),
    InsertGetError,
    NotFound,
-    Protected
+    Protected,
}
impl DatabaseError {
@@ -37,7 +38,7 @@ impl Serialize for DatabaseError {
impl From<&DatabaseError> for String {
    fn from(err: &DatabaseError) -> Self {
        match err {
-            DatabaseError::InsertError => String::from("error on inserting into database"),
+            DatabaseError::InsertError(_) => String::from("error on inserting into database"),
            DatabaseError::InsertGetError => {
                String::from("error on retrieving new entry from database (your entry was saved)")
            }

View file

@@ -2,10 +2,17 @@ use diesel::sql_types::Binary;
use serde::{Deserialize, Serialize};

use crate::db::model_utils::Period;
-use super::schema::schedules;
+use super::schema::*;
use crate::types::EmgauwaUid;

-#[derive(Serialize, Queryable)]
+#[derive(Debug, Serialize, Identifiable, Queryable)]
+pub struct Relay {
+    #[serde(skip)]
+    pub id: i32,
+    // TODO
+}
+
+#[derive(Debug, Serialize, Identifiable, Queryable)]
pub struct Schedule {
    #[serde(skip)]
    pub id: i32,
@@ -27,7 +34,7 @@ pub struct NewSchedule<'a> {
#[sql_type = "Binary"]
pub struct Periods(pub(crate) Vec<Period>);

-#[derive(Serialize, Queryable)]
+#[derive(Debug, Serialize, Identifiable, Queryable, Clone)]
pub struct Tag {
    pub id: i32,
    pub tag: String,
@@ -39,9 +46,22 @@ pub struct NewTag<'a> {
    pub tag: &'a str,
}

-#[derive(Insertable)]
-#[table_name = "junction_tag_schedule"]
-pub struct NewJunctionTagSchedule<'a> {
+#[derive(Queryable, Associations, Identifiable)]
+#[belongs_to(Relay)]
+#[belongs_to(Schedule)]
+#[belongs_to(Tag)]
+#[table_name = "junction_tag"]
+pub struct JunctionTag {
+    pub id: i32,
    pub tag_id: i32,
-    pub schedule_id: i32,
+    pub relay_id: Option<i32>,
+    pub schedule_id: Option<i32>,
+}
+
+#[derive(Insertable)]
+#[table_name = "junction_tag"]
+pub struct NewJunctionTag {
+    pub tag_id: i32,
+    pub relay_id: Option<i32>,
+    pub schedule_id: Option<i32>,
}

src/db/schedule.rs (new file, 109 additions)
View file

@@ -0,0 +1,109 @@
use diesel::dsl::sql;
use diesel::prelude::*;

use crate::types::EmgauwaUid;

use crate::db::errors::DatabaseError;
use crate::db::{get_connection, schema};
use crate::db::models::*;
use crate::db::schema::tags::dsl::tags;
use crate::db::schema::junction_tag::dsl::junction_tag;
use crate::db::schema::schedules::dsl::schedules;
use crate::db::tag::{create_junction_tag, create_tag};

pub fn get_schedule_tags(schedule: &Schedule) -> Vec<String> {
    let connection = get_connection();
    JunctionTag::belonging_to(schedule)
        .inner_join(schema::tags::dsl::tags)
        .select(schema::tags::tag)
        .load::<String>(&connection)
        .expect("Error loading tags")
}

pub fn get_schedules() -> Vec<Schedule> {
    let connection = get_connection();
    schedules
        .load::<Schedule>(&connection)
        .expect("Error loading schedules")
}

pub fn get_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<Schedule, DatabaseError> {
    let connection = get_connection();
    let result = schedules
        .filter(schema::schedules::uid.eq(filter_uid))
        .first::<Schedule>(&connection)
        .or(Err(DatabaseError::NotFound))?;

    Ok(result)
}

pub fn delete_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<(), DatabaseError> {
    let filter_uid = match filter_uid {
        EmgauwaUid::Off => Err(DatabaseError::Protected),
        EmgauwaUid::On => Err(DatabaseError::Protected),
        EmgauwaUid::Any(_) => Ok(filter_uid)
    }?;

    let connection = get_connection();
    match diesel::delete(schedules.filter(schema::schedules::uid.eq(filter_uid))).execute(&connection) {
        Ok(rows) => {
            if rows != 0 {
                Ok(())
            } else {
                Err(DatabaseError::DeleteError)
            }
        }
        Err(_) => Err(DatabaseError::DeleteError),
    }
}

pub fn create_schedule(new_name: &str, new_periods: &Periods) -> Result<Schedule, DatabaseError> {
    let connection = get_connection();

    let new_schedule = NewSchedule {
        uid: &EmgauwaUid::default(),
        name: new_name,
        periods: new_periods,
    };

    diesel::insert_into(schedules)
        .values(&new_schedule)
        .execute(&connection)
        .map_err(DatabaseError::InsertError)?;

    let result = schedules
        .find(sql("last_insert_rowid()"))
        .get_result::<Schedule>(&connection)
        .or(Err(DatabaseError::InsertGetError))?;

    Ok(result)
}

pub fn set_schedule_tags(schedule: &Schedule, new_tags: &[String]) -> Result<(), DatabaseError> {
    let connection = get_connection();
    diesel::delete(junction_tag.filter(schema::junction_tag::schedule_id.eq(schedule.id)))
        .execute(&connection)
        .or(Err(DatabaseError::DeleteError))?;

    let mut database_tags: Vec<Tag> = tags.filter(schema::tags::tag.eq_any(new_tags))
        .load::<Tag>(&connection)
        .expect("Error loading tags");

    let mut database_tags_iter = database_tags.clone().into_iter().map(|tag_db| tag_db.tag);

    // create missing tags
    for new_tag in new_tags {
        if !database_tags_iter.any(|t| t.eq(new_tag)) {
            database_tags.push(
                create_tag(new_tag).expect("Error inserting tag")
            );
        }
    }

    for database_tag in database_tags {
        create_junction_tag(database_tag, None, Some(schedule))
            .expect("Error saving junction between tag and schedule");
    }

    Ok(())
}
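A minimal usage sketch (not part of this commit) of the tag helpers above, assuming a crate-internal caller, an already migrated database, and that an empty period list is acceptable; the schedule name and tag strings are purely illustrative.

use crate::db::models::Periods;
use crate::db::schedule::{create_schedule, get_schedule_tags, set_schedule_tags};

fn tag_a_schedule_example() {
    // Periods is a thin wrapper around Vec<Period>, so an empty schedule is Periods(vec![]).
    let schedule = create_schedule("irrigation", &Periods(vec![]))
        .expect("Error creating schedule");

    // set_schedule_tags creates any missing tags and replaces the schedule's junction rows.
    set_schedule_tags(&schedule, &["garden".to_string(), "summer".to_string()])
        .expect("Error setting schedule tags");

    // get_schedule_tags returns the plain tag strings, e.g. ["garden", "summer"].
    let tags = get_schedule_tags(&schedule);
    println!("{:?}", tags);
}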

View file

@@ -2,7 +2,7 @@ table! {
    controllers (id) {
        id -> Integer,
        uid -> Text,
-        name -> Nullable<Text>,
+        name -> Text,
        ip -> Nullable<Text>,
        port -> Nullable<Integer>,
        relay_count -> Nullable<Integer>,

@@ -32,8 +32,8 @@ table! {
    macro_actions (id) {
        id -> Integer,
        macro_id -> Integer,
-        relay_id -> Nullable<Integer>,
-        schedule_id -> Nullable<Integer>,
+        relay_id -> Integer,
+        schedule_id -> Integer,
        weekday -> SmallInt,
    }
}

@@ -42,14 +42,14 @@ table! {
    macros (id) {
        id -> Integer,
        uid -> Text,
-        name -> Nullable<Text>,
+        name -> Text,
    }
}

table! {
    relays (id) {
        id -> Integer,
-        name -> Nullable<Text>,
+        name -> Text,
        number -> Integer,
        controller_id -> Integer,
    }
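For reference, a small sketch (not in the commit) of what dropping Nullable from these columns means on the query side, assuming the usual diesel 1.x mapping where Text loads as String and Nullable<Text> as Option<String>; the helper function is hypothetical.

use diesel::prelude::*;
use crate::db::get_connection;
use crate::db::schema::relays::dsl::{name, relays};

fn relay_names() -> Vec<String> {
    let connection = get_connection();
    // With `name -> Text` the column now loads as String;
    // before this change (`name -> Nullable<Text>`) it would have been Option<String>.
    relays
        .select(name)
        .load::<String>(&connection)
        .expect("Error loading relay names")
}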

src/db/tag.rs (new file, 52 additions)
View file

@@ -0,0 +1,52 @@
use diesel::dsl::sql;
use diesel::prelude::*;

use crate::db::errors::DatabaseError;
use crate::db::{get_connection};
use crate::db::models::*;
use crate::db::schema::tags::dsl::tags;
use crate::db::schema::junction_tag::dsl::junction_tag;

pub fn create_tag(new_tag: &str) -> Result<Tag, DatabaseError> {
    let connection = get_connection();
    let new_tag = NewTag {
        tag: new_tag,
    };

    diesel::insert_into(tags)
        .values(&new_tag)
        .execute(&connection)
        .map_err(DatabaseError::InsertError)?;

    let result = tags
        .find(sql("last_insert_rowid()"))
        .get_result::<Tag>(&connection)
        .or(Err(DatabaseError::InsertGetError))?;

    Ok(result)
}

pub fn create_junction_tag(target_tag: Tag, target_relay: Option<&Relay>, target_schedule: Option<&Schedule>) -> Result<JunctionTag, DatabaseError> {
    let connection = get_connection();
    let new_junction_tag = NewJunctionTag {
        relay_id: target_relay.map(|r| r.id),
        schedule_id: target_schedule.map(|s| s.id),
        tag_id: target_tag.id
    };

    diesel::insert_into(junction_tag)
        .values(&new_junction_tag)
        .execute(&connection)
        .map_err(DatabaseError::InsertError)?;

    let result = junction_tag
        .find(sql("last_insert_rowid()"))
        .get_result::<JunctionTag>(&connection)
        .or(Err(DatabaseError::InsertGetError))?;

    Ok(result)
}
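A sketch (not part of the commit) of the reverse lookup that the new Associations derives make possible: collecting the schedule ids junctioned to one tag. It assumes the same diesel 1.x setup as tag.rs above and that junction_tag.schedule_id is declared Nullable<Integer> in schema.rs, matching the Option<i32> on the JunctionTag model; the function itself is hypothetical.

use diesel::prelude::*;
use crate::db::get_connection;
use crate::db::models::{JunctionTag, Tag};
use crate::db::schema;
use crate::db::schema::tags::dsl::tags;

fn schedule_ids_for_tag(tag_name: &str) -> Vec<i32> {
    let connection = get_connection();
    // Load the tag row itself.
    let tag = tags
        .filter(schema::tags::tag.eq(tag_name))
        .first::<Tag>(&connection)
        .expect("Error loading tag");
    // belongs_to(Tag) on JunctionTag provides belonging_to().
    JunctionTag::belonging_to(&tag)
        .select(schema::junction_tag::schedule_id)
        .load::<Option<i32>>(&connection)
        .expect("Error loading junctions")
        .into_iter()
        .flatten() // skip junctions that point at a relay instead of a schedule
        .collect()
}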

View file

@@ -2,20 +2,23 @@ use std::convert::TryFrom;
use actix_web::{HttpResponse, Responder, web, get, delete};
use serde::{Serialize, Deserialize};

-use crate::db;
use crate::db::models::Periods;
+use crate::db::schedule::*;
use crate::handlers::errors::HandlerError;
+use crate::return_models::ReturnSchedule;
use crate::types::EmgauwaUid;

#[derive(Debug, Serialize, Deserialize)]
pub struct RequestSchedule {
    name: String,
    periods: Periods,
+    tags: Vec<String>,
}

pub async fn index() -> impl Responder {
-    let schedules = db::get_schedules();
-    HttpResponse::Ok().json(schedules)
+    let schedules = get_schedules();
+    let return_schedules: Vec<ReturnSchedule> = schedules.into_iter().map(ReturnSchedule::from).collect();
+    HttpResponse::Ok().json(return_schedules)
}

#[get("/api/v1/schedules/{schedule_id}")]

@@ -25,9 +28,9 @@ pub async fn show(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Resp
    match emgauwa_uid {
        Ok(uid) => {
-            let schedule = db::get_schedule_by_uid(uid);
+            let schedule = get_schedule_by_uid(uid);
            match schedule {
-                Ok(ok) => HttpResponse::Ok().json(ok),
+                Ok(ok) => HttpResponse::Ok().json(ReturnSchedule::from(ok)),
                Err(err) => HttpResponse::from(err),
            }
        },

@@ -36,12 +39,19 @@ pub async fn show(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Resp
}

pub async fn add(post: web::Json<RequestSchedule>) -> impl Responder {
-    let new_schedule = db::create_schedule(&post.name, &post.periods);
+    let new_schedule = create_schedule(&post.name, &post.periods);

-    match new_schedule {
-        Ok(ok) => HttpResponse::Created().json(ok),
-        Err(err) => HttpResponse::from(err),
+    if new_schedule.is_err() {
+        return HttpResponse::from(new_schedule.unwrap_err())
    }
+    let new_schedule = new_schedule.unwrap();
+
+    let result = set_schedule_tags(&new_schedule, post.tags.as_slice());
+    if result.is_err() {
+        return HttpResponse::from(result.unwrap_err());
+    }
+
+    HttpResponse::Created().json(ReturnSchedule::from(new_schedule))
}

#[delete("/api/v1/schedules/{schedule_id}")]

@@ -53,7 +63,7 @@ pub async fn delete(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Re
        EmgauwaUid::Off => HttpResponse::from(HandlerError::ProtectedSchedule),
        EmgauwaUid::On => HttpResponse::from(HandlerError::ProtectedSchedule),
        EmgauwaUid::Any(_) => {
-            match db::delete_schedule_by_uid(uid) {
+            match delete_schedule_by_uid(uid) {
                Ok(_) => HttpResponse::Ok().json("schedule got deleted"),
                Err(err) => HttpResponse::from(err)
            }

View file

@@ -3,6 +3,7 @@ extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
extern crate dotenv;
+extern crate core;

use actix_web::{middleware, web, App, HttpServer};
use actix_web::middleware::normalize::TrailingSlash;

@@ -10,6 +11,7 @@ use env_logger::{Builder, Env};
mod db;
mod handlers;
+mod return_models;
mod types;
#[actix_web::main]

src/return_models.rs (new file, 21 additions)
View file

@@ -0,0 +1,21 @@
use serde::{Serialize};

use crate::db::models::Schedule;
use crate::db::schedule::get_schedule_tags;

#[derive(Debug, Serialize)]
pub struct ReturnSchedule {
    #[serde(flatten)]
    pub schedule: Schedule,
    pub tags: Vec<String>,
}

impl From<Schedule> for ReturnSchedule {
    fn from(schedule: Schedule) -> Self {
        let tags: Vec<String> = get_schedule_tags(&schedule);

        ReturnSchedule {
            schedule,
            tags,
        }
    }
}
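For illustration (not part of the commit), roughly how a ReturnSchedule is meant to serialize: #[serde(flatten)] inlines the Schedule fields next to tags. The sketch assumes the serde_json crate is available in this project and the printed values are illustrative only; the helper function is hypothetical.

use crate::db::schedule::get_schedule_by_uid;
use crate::return_models::ReturnSchedule;
use crate::types::EmgauwaUid;

fn print_schedule_json() {
    // The 'on' schedule is seeded by the migration above.
    let schedule = get_schedule_by_uid(EmgauwaUid::On).expect("schedule not found");
    // Expected shape: {"uid": "...", "name": "on", "periods": [...], "tags": ["..."]}
    let json = serde_json::to_string(&ReturnSchedule::from(schedule)).expect("serialization failed");
    println!("{}", json);
}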