From 75f8afd6242b20163522475fafb2a9f399275d35 Mon Sep 17 00:00:00 2001
From: Tobias Reisinger <tobias@msrg.cc>
Date: Sun, 3 Apr 2022 01:35:51 +0200
Subject: [PATCH] Add tags for schedules

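Schedules can now be labelled with tags.

The schedule queries move from db.rs into a new db::schedule module, a
new db::tag module creates tags and junction_tag rows, and schedules
are returned through a ReturnSchedule wrapper that includes their tag
names. The schedules endpoint accepts a list of tag names on creation,
for example (shape of the periods value illustrative only):

    {"name": "evening", "periods": [...], "tags": ["livingroom", "lamp"]}

The init migration is reformatted and several columns (names, the
macro_actions foreign keys) become NOT NULL.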
---
 migrations/2021-10-13-000000_init/up.sql | 224 +++++++++++++----------
 src/db.rs                                |  70 +------
 src/db/errors.rs                         |   7 +-
 src/db/models.rs                         |  34 +++-
 src/db/schedule.rs                       | 109 +++++++++++
 src/db/schema.rs                         |  10 +-
 src/db/tag.rs                            |  52 ++++++
 src/handlers/v1/schedules.rs             |  30 ++-
 src/main.rs                              |   2 +
 src/return_models.rs                     |  21 +++
 10 files changed, 375 insertions(+), 184 deletions(-)
 create mode 100644 src/db/schedule.rs
 create mode 100644 src/db/tag.rs
 create mode 100644 src/return_models.rs

diff --git a/migrations/2021-10-13-000000_init/up.sql b/migrations/2021-10-13-000000_init/up.sql
index acee951..62fe3aa 100644
--- a/migrations/2021-10-13-000000_init/up.sql
+++ b/migrations/2021-10-13-000000_init/up.sql
@@ -1,126 +1,164 @@
 CREATE TABLE controllers
 (
-    id      INTEGER
-            PRIMARY KEY
-            AUTOINCREMENT
-            NOT NULL,
-    uid     VARCHAR(36)
-            NOT NULL
-            UNIQUE,
-    name        VARCHAR(128),
-    ip          VARCHAR(16),
-    port        INTEGER,
-    relay_count INTEGER,
-    active      BOOLEAN
-                NOT NULL
+    id
+        INTEGER
+        PRIMARY KEY
+        AUTOINCREMENT
+        NOT NULL,
+    uid
+        VARCHAR(36)
+        NOT NULL
+        UNIQUE,
+    name
+        VARCHAR(128)
+        NOT NULL,
+    ip
+        VARCHAR(16),
+    port
+        INTEGER,
+    relay_count
+        INTEGER,
+    active
+        BOOLEAN
+        NOT NULL
 );
 
 CREATE TABLE relays
 (
-    id              INTEGER
-                    PRIMARY KEY
-                    AUTOINCREMENT
-                    NOT NULL,
-    name            VARCHAR(128),
-    number          INTEGER
-                    NOT NULL,
-    controller_id   INTEGER
-                    NOT NULL
-                    REFERENCES controllers (id)
-                    ON DELETE CASCADE
+    id
+        INTEGER
+        PRIMARY KEY
+        AUTOINCREMENT
+        NOT NULL,
+    name
+        VARCHAR(128)
+        NOT NULL,
+    number
+        INTEGER
+        NOT NULL,
+    controller_id
+        INTEGER
+        NOT NULL
+        REFERENCES controllers (id)
+            ON DELETE CASCADE
 );
 
 CREATE TABLE schedules
 (
-    id      INTEGER
-            PRIMARY KEY
-            AUTOINCREMENT
-            NOT NULL,
-    uid     BLOB
-            NOT NULL
-            UNIQUE,
-    name    VARCHAR(128)
-            NOT NULL,
-    periods BLOB
-            NOT NULL
+    id
+        INTEGER
+        PRIMARY KEY
+        AUTOINCREMENT
+        NOT NULL,
+    uid
+        BLOB
+        NOT NULL
+        UNIQUE,
+    name
+        VARCHAR(128)
+        NOT NULL,
+    periods
+        BLOB
+        NOT NULL
 );
 INSERT INTO schedules (uid, name, periods) VALUES (x'00', 'off', x'');
 INSERT INTO schedules (uid, name, periods) VALUES (x'01',  'on', x'00000000');
 
 CREATE TABLE tags
 (
-    id      INTEGER
-            PRIMARY KEY
-            AUTOINCREMENT
-            NOT NULL,
-    tag     VARCHAR(128)
-            NOT NULL
-            UNIQUE
+    id
+        INTEGER
+        PRIMARY KEY
+        AUTOINCREMENT
+        NOT NULL,
+    tag
+        VARCHAR(128)
+        NOT NULL
+        UNIQUE
 );
 
 CREATE TABLE junction_tag
 (
-    id          INTEGER
-                PRIMARY KEY
-                AUTOINCREMENT
-                NOT NULL,
-    tag_id      INTEGER
-                NOT NULL
-                REFERENCES tags (id)
-                ON DELETE CASCADE,
-    relay_id    INTEGER
-                REFERENCES relays (id)
-                ON DELETE CASCADE,
-    schedule_id INTEGER
-                REFERENCES schedules (id)
-                ON DELETE CASCADE
+    id
+        INTEGER
+        PRIMARY KEY
+        AUTOINCREMENT
+        NOT NULL,
+    tag_id
+        INTEGER
+        NOT NULL
+        REFERENCES tags (id)
+            ON DELETE CASCADE,
+    relay_id
+        INTEGER
+        REFERENCES relays (id)
+            ON DELETE CASCADE,
+    schedule_id
+        INTEGER
+        REFERENCES schedules (id)
+            ON DELETE CASCADE
 );
 
 CREATE TABLE junction_relay_schedule
 (
-    id          INTEGER
-                PRIMARY KEY
-                AUTOINCREMENT
-                NOT NULL,
-    weekday     SMALLINT
-                NOT NULL,
-    relay_id    INTEGER
-                REFERENCES relays (id)
-                ON DELETE CASCADE,
-    schedule_id INTEGER
-                DEFAULT 1
-                REFERENCES schedules (id)
-                ON DELETE SET DEFAULT
+    id
+        INTEGER
+        PRIMARY KEY
+        AUTOINCREMENT
+        NOT NULL,
+    weekday
+        SMALLINT
+        NOT NULL,
+    relay_id
+        INTEGER
+        REFERENCES relays (id)
+            ON DELETE CASCADE,
+    schedule_id
+        INTEGER
+        DEFAULT 1
+        REFERENCES schedules (id)
+            ON DELETE SET DEFAULT
 );
 
 CREATE TABLE macros
 (
-    id      INTEGER
-            PRIMARY KEY
-            AUTOINCREMENT
-            NOT NULL,
-    uid     VARCHAR(36)
-            NOT NULL
-            UNIQUE,
-    name    VARCHAR(128)
+    id
+        INTEGER
+        PRIMARY KEY
+        AUTOINCREMENT
+        NOT NULL,
+    uid
+        VARCHAR(36)
+        NOT NULL
+        UNIQUE,
+    name
+        VARCHAR(128)
+        NOT NULL
 );
 
 CREATE TABLE macro_actions
 (
-    id          INTEGER
-                PRIMARY KEY
-                AUTOINCREMENT
-                NOT NULL,
-    macro_id    INTEGER
-                NOT NULL
-                REFERENCES macros (id)
-                ON DELETE CASCADE,
-    relay_id    INTEGER
-                REFERENCES relays (id)
-                ON DELETE CASCADE,
-    schedule_id INTEGER
-                REFERENCES schedules (id)
-                ON DELETE CASCADE,
-    weekday     SMALLINT
-                NOT NULL
+    id
+        INTEGER
+        PRIMARY KEY
+        AUTOINCREMENT
+        NOT NULL,
+    macro_id
+        INTEGER
+        NOT NULL
+        REFERENCES macros (id)
+            ON DELETE CASCADE,
+    relay_id
+        INTEGER
+        NOT NULL
+        REFERENCES relays (id)
+            ON DELETE CASCADE,
+    schedule_id
+        INTEGER
+        NOT NULL
+        REFERENCES schedules (id)
+            ON DELETE CASCADE,
+    weekday
+        SMALLINT
+        NOT NULL
 );
diff --git a/src/db.rs b/src/db.rs
index 3fe5e66..284a639 100644
--- a/src/db.rs
+++ b/src/db.rs
@@ -1,18 +1,15 @@
 use std::env;
 
-use diesel::dsl::sql;
 use diesel::prelude::*;
 use diesel_migrations::embed_migrations;
 use dotenv::dotenv;
 
-use crate::types::EmgauwaUid;
-use errors::DatabaseError;
-use models::*;
-use schema::schedules::dsl::*;
-
 pub mod errors;
 pub mod models;
 pub mod schema;
+pub mod schedule;
+pub mod tag;
+
 mod model_utils;
 
 embed_migrations!("migrations");
@@ -28,63 +25,4 @@ fn get_connection() -> SqliteConnection {
 pub fn run_migrations() {
     let connection = get_connection();
     embedded_migrations::run(&connection).expect("Failed to run migrations.");
 }
-
-pub fn get_schedules() -> Vec<Schedule> {
-    let connection = get_connection();
-    schedules
-        .load::<Schedule>(&connection)
-        .expect("Error loading schedules")
-}
-
-pub fn get_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<Schedule, DatabaseError> {
-    let connection = get_connection();
-    let result = schedules
-        .filter(uid.eq(filter_uid))
-        .first::<Schedule>(&connection)
-        .or(Err(DatabaseError::NotFound))?;
-
-    Ok(result)
-}
-
-pub fn delete_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<(), DatabaseError> {
-    let filter_uid = match filter_uid {
-        EmgauwaUid::Off => Err(DatabaseError::Protected),
-        EmgauwaUid::On => Err(DatabaseError::Protected),
-        EmgauwaUid::Any(_) => Ok(filter_uid)
-    }?;
-
-    let connection = get_connection();
-    match diesel::delete(schedules.filter(uid.eq(filter_uid))).execute(&connection) {
-        Ok(rows) => {
-            if rows != 0 {
-                Ok(())
-            } else {
-                Err(DatabaseError::DeleteError)
-            }
-        }
-        Err(_) => Err(DatabaseError::DeleteError),
-    }
-}
-
-pub fn create_schedule(new_name: &str, new_periods: &Periods) -> Result<Schedule, DatabaseError> {
-    let connection = get_connection();
-
-    let new_schedule = NewSchedule {
-        uid: &EmgauwaUid::default(),
-        name: new_name,
-        periods: new_periods,
-    };
-
-    diesel::insert_into(schedules)
-        .values(&new_schedule)
-        .execute(&connection)
-        .or(Err(DatabaseError::InsertError))?;
-
-    let result = schedules
-        .find(sql("last_insert_rowid()"))
-        .get_result::<Schedule>(&connection)
-        .or(Err(DatabaseError::InsertGetError))?;
-
-    Ok(result)
-}
diff --git a/src/db/errors.rs b/src/db/errors.rs
index 422ce8a..2b560f0 100644
--- a/src/db/errors.rs
+++ b/src/db/errors.rs
@@ -3,12 +3,13 @@ use actix_web::http::StatusCode;
 use serde::ser::SerializeStruct;
 use serde::{Serialize, Serializer};
 
+#[derive(Debug)]
 pub enum DatabaseError {
     DeleteError,
-    InsertError,
+    InsertError(diesel::result::Error),
     InsertGetError,
     NotFound,
-    Protected
+    Protected,
 }
 
 impl DatabaseError {
@@ -37,7 +38,7 @@ impl Serialize for DatabaseError {
 impl From<&DatabaseError> for String {
     fn from(err: &DatabaseError) -> Self {
         match err {
-            DatabaseError::InsertError => String::from("error on inserting into database"),
+            DatabaseError::InsertError(_) => String::from("error on inserting into database"),
             DatabaseError::InsertGetError => {
                 String::from("error on retrieving new entry from database (your entry was saved)")
             }
diff --git a/src/db/models.rs b/src/db/models.rs
index 2708f83..3c8cd13 100644
--- a/src/db/models.rs
+++ b/src/db/models.rs
@@ -2,10 +2,17 @@ use diesel::sql_types::Binary;
 use serde::{Deserialize, Serialize};
 use crate::db::model_utils::Period;
 
-use super::schema::schedules;
+use super::schema::*;
 use crate::types::EmgauwaUid;
 
-#[derive(Serialize, Queryable)]
+#[derive(Debug, Serialize, Identifiable, Queryable)]
+pub struct Relay {
+    #[serde(skip)]
+    pub id: i32,
+    // TODO
+}
+
+#[derive(Debug, Serialize, Identifiable, Queryable)]
 pub struct Schedule {
     #[serde(skip)]
     pub id: i32,
@@ -27,7 +34,7 @@ pub struct NewSchedule<'a> {
 #[sql_type = "Binary"]
 pub struct Periods(pub(crate) Vec<Period>);
 
-#[derive(Serialize, Queryable)]
+#[derive(Debug, Serialize, Identifiable, Queryable, Clone)]
 pub struct Tag {
     pub id: i32,
     pub tag: String,
@@ -39,9 +46,22 @@ pub struct NewTag<'a> {
     pub tag: &'a str,
 }
 
-#[derive(Insertable)]
-#[table_name = "junction_tag_schedule"]
-pub struct NewJunctionTagSchedule<'a> {
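+/// A row of the junction_tag table, linking a tag to a relay and/or a schedule.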
+#[derive(Queryable, Associations, Identifiable)]
+#[belongs_to(Relay)]
+#[belongs_to(Schedule)]
+#[belongs_to(Tag)]
+#[table_name = "junction_tag"]
+pub struct JunctionTag {
+    pub id: i32,
     pub tag_id: i32,
-    pub schedule_id: i32,
+    pub relay_id: Option<i32>,
+    pub schedule_id: Option<i32>,
+}
+
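+/// Insertable counterpart of JunctionTag.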
+#[derive(Insertable)]
+#[table_name = "junction_tag"]
+pub struct NewJunctionTag {
+    pub tag_id: i32,
+    pub relay_id: Option<i32>,
+    pub schedule_id: Option<i32>,
 }
diff --git a/src/db/schedule.rs b/src/db/schedule.rs
new file mode 100644
index 0000000..4c73657
--- /dev/null
+++ b/src/db/schedule.rs
@@ -0,0 +1,109 @@
+use diesel::dsl::sql;
+use diesel::prelude::*;
+
+use crate::types::EmgauwaUid;
+
+use crate::db::errors::DatabaseError;
+use crate::db::{get_connection, schema};
+use crate::db::models::*;
+use crate::db::schema::tags::dsl::tags;
+use crate::db::schema::junction_tag::dsl::junction_tag;
+use crate::db::schema::schedules::dsl::schedules;
+use crate::db::tag::{create_junction_tag, create_tag};
+
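+/// Return the names of all tags attached to the given schedule.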
+pub fn get_schedule_tags(schedule: &Schedule) -> Vec<String> {
+    let connection = get_connection();
+    JunctionTag::belonging_to(schedule)
+        .inner_join(schema::tags::dsl::tags)
+        .select(schema::tags::tag)
+        .load::<String>(&connection)
+        .expect("Error loading tags")
+}
+
+pub fn get_schedules() -> Vec<Schedule> {
+    let connection = get_connection();
+    schedules
+        .load::<Schedule>(&connection)
+        .expect("Error loading schedules")
+}
+
+pub fn get_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<Schedule, DatabaseError> {
+    let connection = get_connection();
+    let result = schedules
+        .filter(schema::schedules::uid.eq(filter_uid))
+        .first::<Schedule>(&connection)
+        .or(Err(DatabaseError::NotFound))?;
+
+    Ok(result)
+}
+
+pub fn delete_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<(), DatabaseError> {
+    let filter_uid = match filter_uid {
+        EmgauwaUid::Off => Err(DatabaseError::Protected),
+        EmgauwaUid::On => Err(DatabaseError::Protected),
+        EmgauwaUid::Any(_) => Ok(filter_uid)
+    }?;
+
+    let connection = get_connection();
+    match diesel::delete(schedules.filter(schema::schedules::uid.eq(filter_uid))).execute(&connection) {
+        Ok(rows) => {
+            if rows != 0 {
+                Ok(())
+            } else {
+                Err(DatabaseError::DeleteError)
+            }
+        }
+        Err(_) => Err(DatabaseError::DeleteError),
+    }
+}
+
+pub fn create_schedule(new_name: &str, new_periods: &Periods) -> Result<Schedule, DatabaseError> {
+    let connection = get_connection();
+
+    let new_schedule = NewSchedule {
+        uid: &EmgauwaUid::default(),
+        name: new_name,
+        periods: new_periods,
+    };
+
+    diesel::insert_into(schedules)
+        .values(&new_schedule)
+        .execute(&connection)
+        .map_err(DatabaseError::InsertError)?;
+
+    let result = schedules
+        .find(sql("last_insert_rowid()"))
+        .get_result::<Schedule>(&connection)
+        .or(Err(DatabaseError::InsertGetError))?;
+
+    Ok(result)
+}
+
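+/// Replace the tags of a schedule: existing junction_tag rows for the schedule
+/// are deleted, missing tags are created and a junction is inserted for every
+/// name in `new_tags`.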
+pub fn set_schedule_tags(schedule: &Schedule, new_tags: &[String]) -> Result<(), DatabaseError> {
+    let connection = get_connection();
+    diesel::delete(junction_tag.filter(schema::junction_tag::schedule_id.eq(schedule.id)))
+        .execute(&connection)
+        .or(Err(DatabaseError::DeleteError))?;
+
+    let mut database_tags: Vec<Tag> = tags.filter(schema::tags::tag.eq_any(new_tags))
+        .load::<Tag>(&connection)
+        .expect("Error loading tags");
+
+    // names of the tags which already exist in the database
+    let database_tag_names: Vec<String> = database_tags.iter().map(|tag_db| tag_db.tag.clone()).collect();
+
+    // create missing tags
+    for new_tag in new_tags {
+        if !database_tag_names.contains(new_tag) {
+            database_tags.push(
+                create_tag(new_tag).expect("Error inserting tag")
+            );
+        }
+    }
+
+    for database_tag in database_tags {
+        create_junction_tag(database_tag, None, Some(schedule))
+            .expect("Error saving junction between tag and schedule");
+    }
+
+    Ok(())
+}
diff --git a/src/db/schema.rs b/src/db/schema.rs
index 340996b..c3d800c 100644
--- a/src/db/schema.rs
+++ b/src/db/schema.rs
@@ -2,7 +2,7 @@ table! {
     controllers (id) {
         id -> Integer,
         uid -> Text,
-        name -> Nullable<Text>,
+        name -> Text,
         ip -> Nullable<Text>,
         port -> Nullable<Integer>,
         relay_count -> Nullable<Integer>,
@@ -32,8 +32,8 @@ table! {
     macro_actions (id) {
         id -> Integer,
         macro_id -> Integer,
-        relay_id -> Nullable<Integer>,
-        schedule_id -> Nullable<Integer>,
+        relay_id -> Integer,
+        schedule_id -> Integer,
         weekday -> SmallInt,
     }
 }
@@ -42,14 +42,14 @@ table! {
     macros (id) {
         id -> Integer,
         uid -> Text,
-        name -> Nullable<Text>,
+        name -> Text,
     }
 }
 
 table! {
     relays (id) {
         id -> Integer,
-        name -> Nullable<Text>,
+        name -> Text,
         number -> Integer,
         controller_id -> Integer,
     }
diff --git a/src/db/tag.rs b/src/db/tag.rs
new file mode 100644
index 0000000..a002ebc
--- /dev/null
+++ b/src/db/tag.rs
@@ -0,0 +1,52 @@
+use diesel::dsl::sql;
+use diesel::prelude::*;
+
+use crate::db::errors::DatabaseError;
+use crate::db::get_connection;
+use crate::db::models::*;
+use crate::db::schema::tags::dsl::tags;
+use crate::db::schema::junction_tag::dsl::junction_tag;
+
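+/// Insert a new tag and return the created row.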
+pub fn create_tag(new_tag: &str) -> Result<Tag, DatabaseError> {
+    let connection = get_connection();
+
+    let new_tag = NewTag {
+        tag: new_tag,
+    };
+
+    diesel::insert_into(tags)
+        .values(&new_tag)
+        .execute(&connection)
+        .map_err(DatabaseError::InsertError)?;
+
+    let result = tags
+        .find(sql("last_insert_rowid()"))
+        .get_result::<Tag>(&connection)
+        .or(Err(DatabaseError::InsertGetError))?;
+
+    Ok(result)
+}
+
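+/// Link a tag to an optional relay and/or schedule by inserting a junction_tag row.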
+pub fn create_junction_tag(target_tag: Tag, target_relay: Option<&Relay>, target_schedule: Option<&Schedule>) -> Result<JunctionTag, DatabaseError> {
+    let connection = get_connection();
+
+    let new_junction_tag = NewJunctionTag {
+        relay_id: target_relay.map(|r| r.id),
+        schedule_id: target_schedule.map(|s| s.id),
+        tag_id: target_tag.id
+    };
+
+    diesel::insert_into(junction_tag)
+        .values(&new_junction_tag)
+        .execute(&connection)
+        .map_err(DatabaseError::InsertError)?;
+
+    let result = junction_tag
+        .find(sql("last_insert_rowid()"))
+        .get_result::<JunctionTag>(&connection)
+        .or(Err(DatabaseError::InsertGetError))?;
+
+    Ok(result)
+}
diff --git a/src/handlers/v1/schedules.rs b/src/handlers/v1/schedules.rs
index c491f2e..32ea007 100644
--- a/src/handlers/v1/schedules.rs
+++ b/src/handlers/v1/schedules.rs
@@ -2,20 +2,23 @@ use std::convert::TryFrom;
 use actix_web::{HttpResponse, Responder, web, get, delete};
 use serde::{Serialize, Deserialize};
 
-use crate::db;
 use crate::db::models::Periods;
+use crate::db::schedule::*;
 use crate::handlers::errors::HandlerError;
+use crate::return_models::ReturnSchedule;
 use crate::types::EmgauwaUid;
 
 #[derive(Debug, Serialize, Deserialize)]
 pub struct RequestSchedule {
     name: String,
     periods: Periods,
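+    /// Names of the tags to attach to the schedule.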
+    tags: Vec<String>,
 }
 
 pub async fn index() -> impl Responder {
-    let schedules = db::get_schedules();
-    HttpResponse::Ok().json(schedules)
+    let schedules = get_schedules();
+    let return_schedules: Vec<ReturnSchedule> = schedules.into_iter().map(ReturnSchedule::from).collect();
+    HttpResponse::Ok().json(return_schedules)
 }
 
 #[get("/api/v1/schedules/{schedule_id}")]
@@ -25,9 +28,9 @@ pub async fn show(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Resp
 
     match emgauwa_uid {
         Ok(uid) => {
-            let schedule = db::get_schedule_by_uid(uid);
+            let schedule = get_schedule_by_uid(uid);
             match schedule {
-                Ok(ok) => HttpResponse::Ok().json(ok),
+                Ok(ok) => HttpResponse::Ok().json(ReturnSchedule::from(ok)),
                 Err(err) => HttpResponse::from(err),
             }
         },
@@ -36,12 +39,19 @@ pub async fn show(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Resp
 }
 
 pub async fn add(post: web::Json<RequestSchedule>) -> impl Responder {
-    let new_schedule = db::create_schedule(&post.name, &post.periods);
+    let new_schedule = create_schedule(&post.name, &post.periods);
 
-    match new_schedule {
-        Ok(ok) => HttpResponse::Created().json(ok),
-        Err(err) => HttpResponse::from(err),
+    if new_schedule.is_err() {
+        return HttpResponse::from(new_schedule.unwrap_err());
     }
+    let new_schedule = new_schedule.unwrap();
+
+    let result = set_schedule_tags(&new_schedule, post.tags.as_slice());
+    if result.is_err() {
+        return HttpResponse::from(result.unwrap_err());
+    }
+
+    HttpResponse::Created().json(ReturnSchedule::from(new_schedule))
 }
 
 #[delete("/api/v1/schedules/{schedule_id}")]
@@ -53,7 +63,7 @@ pub async fn delete(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Re
             EmgauwaUid::Off => HttpResponse::from(HandlerError::ProtectedSchedule),
             EmgauwaUid::On => HttpResponse::from(HandlerError::ProtectedSchedule),
             EmgauwaUid::Any(_) => {
-                match db::delete_schedule_by_uid(uid) {
+                match delete_schedule_by_uid(uid) {
                     Ok(_) => HttpResponse::Ok().json("schedule got deleted"),
                     Err(err) => HttpResponse::from(err)
                 }
diff --git a/src/main.rs b/src/main.rs
index 15e165a..e3faf9a 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -3,6 +3,7 @@ extern crate diesel;
 #[macro_use]
 extern crate diesel_migrations;
 extern crate dotenv;
+extern crate core;
 
 use actix_web::{middleware, web, App, HttpServer};
 use actix_web::middleware::normalize::TrailingSlash;
@@ -10,6 +11,7 @@ use env_logger::{Builder, Env};
 
 mod db;
 mod handlers;
+mod return_models;
 mod types;
 
 #[actix_web::main]
diff --git a/src/return_models.rs b/src/return_models.rs
new file mode 100644
index 0000000..0f77a24
--- /dev/null
+++ b/src/return_models.rs
@@ -0,0 +1,21 @@
+use serde::Serialize;
+
+use crate::db::models::Schedule;
+use crate::db::schedule::get_schedule_tags;
+
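+/// A schedule as returned by the API: all schedule fields (flattened) plus its tag names.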
+#[derive(Debug, Serialize)]
+pub struct ReturnSchedule {
+    #[serde(flatten)]
+    pub schedule: Schedule,
+    pub tags: Vec<String>,
+}
+
+impl From<Schedule> for ReturnSchedule {
+    fn from(schedule: Schedule) -> Self {
+        let tags: Vec<String> = get_schedule_tags(&schedule);
+        ReturnSchedule {
+            schedule,
+            tags,
+        }
+    }
+}