Switch spaces to tabs
parent 07aca5293e
commit 4261141c3a

17 changed files with 560 additions and 557 deletions

Apart from .editorconfig and the new .rustfmt.toml, every change in this commit is whitespace-only: lines indented with spaces are replaced by the same lines indented with tabs, so the removed and added versions of each hunk are identical except for leading whitespace.
.editorconfig
@@ -8,8 +8,9 @@ root = true
 [*]
 end_of_line = lf
 insert_final_newline = true
-indent_style = space
+indent_style = tab
+indent_size = 4
 trim_trailing_whitespace = true

 [*.yml]
 indent_size = 2

.rustfmt.toml (new file, 2 added lines)
@@ -0,0 +1,2 @@
+newline_style = "Unix"
+hard_tabs = true

build.rs (4 changed lines)
@@ -1,4 +1,4 @@
fn main() {
    #[cfg(any(target_arch = "arm", target_arch = "aarch64"))]
    println!("cargo:rustc-link-lib=dylib=wiringPi");
}

src/db.rs (12 changed lines)
@@ -15,14 +15,14 @@ mod model_utils;
embed_migrations!("migrations");

fn get_connection() -> SqliteConnection {
    dotenv().ok();

    let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
    SqliteConnection::establish(&database_url)
        .unwrap_or_else(|_| panic!("Error connecting to {}", database_url))
}

pub fn run_migrations() {
    let connection = get_connection();
    embedded_migrations::run(&connection).expect("Failed to run migrations.");
}

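Note (not part of the commit): get_connection() reads DATABASE_URL from the environment, and dotenv().ok() also picks it up from a local .env file if one exists. A hypothetical .env entry for the SQLite backend could look like this (the file name is made up):

    DATABASE_URL=emgauwa.sqlite
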
@@ -5,54 +5,54 @@ use serde::{Serialize, Serializer};

#[derive(Debug)]
pub enum DatabaseError {
    DeleteError,
    InsertError(diesel::result::Error),
    InsertGetError,
    NotFound,
    Protected,
    UpdateError(diesel::result::Error),
}

impl DatabaseError {
    fn get_code(&self) -> StatusCode {
        match self {
            DatabaseError::NotFound => StatusCode::NOT_FOUND,
            DatabaseError::Protected => StatusCode::FORBIDDEN,
            _ => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
}

impl Serialize for DatabaseError {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut s = serializer.serialize_struct("error", 3)?;
        s.serialize_field("type", "database-error")?;
        s.serialize_field("code", &self.get_code().as_u16())?;
        s.serialize_field("description", &String::from(self))?;
        s.end()
    }
}

impl From<&DatabaseError> for String {
    fn from(err: &DatabaseError) -> Self {
        match err {
            DatabaseError::InsertError(_) => String::from("error on inserting into database"),
            DatabaseError::InsertGetError => {
                String::from("error on retrieving new entry from database (your entry was saved)")
            }
            DatabaseError::NotFound => String::from("model was not found in database"),
            DatabaseError::DeleteError => String::from("error on deleting from database"),
            DatabaseError::Protected => String::from("model is protected"),
            DatabaseError::UpdateError(_) => String::from("error on updating the model"),
        }
    }
}

impl From<DatabaseError> for HttpResponse {
    fn from(err: DatabaseError) -> Self {
        HttpResponse::build(err.get_code()).json(err)
    }
}

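Note (not part of the commit): with the Serialize impl above, a DatabaseError::NotFound turned into an HTTP response carries a JSON body roughly like:

    {"type": "database-error", "code": 404, "description": "model was not found in database"}
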
@@ -12,64 +12,64 @@ use std::io::Write;
#[derive(Debug, Serialize, Deserialize, AsExpression, FromSqlRow, PartialEq, Clone)]
#[sql_type = "Binary"]
pub struct Period {
    #[serde(with = "period_format")]
    pub start: NaiveTime,
    #[serde(with = "period_format")]
    pub end: NaiveTime,
}

mod period_format {
    use chrono::NaiveTime;
    use serde::{self, Deserialize, Deserializer, Serializer};

    const FORMAT: &str = "%H:%M";

    pub fn serialize<S>(time: &NaiveTime, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let s = format!("{}", time.format(FORMAT));
        serializer.serialize_str(&s)
    }

    pub fn deserialize<'de, D>(deserializer: D) -> Result<NaiveTime, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        NaiveTime::parse_from_str(&s, FORMAT).map_err(serde::de::Error::custom)
    }
}

impl ToSql<Binary, Sqlite> for Periods {
    fn to_sql<W: Write>(&self, out: &mut Output<W, Sqlite>) -> serialize::Result {
        for period in self.0.iter() {
            out.write_all(&[
                period.start.hour() as u8,
                period.start.minute() as u8,
                period.end.hour() as u8,
                period.end.minute() as u8,
            ])?;
        }
        Ok(IsNull::No)
    }
}

impl FromSql<Binary, Sqlite> for Periods {
    fn from_sql(bytes: Option<&<Sqlite as Backend>::RawValue>) -> deserialize::Result<Self> {
        let blob = bytes.unwrap().read_blob();

        let mut vec = Vec::new();
        for i in (3..blob.len()).step_by(4) {
            let start_val_h: u32 = blob[i - 3] as u32;
            let start_val_m: u32 = blob[i - 2] as u32;
            let end_val_h: u32 = blob[i - 1] as u32;
            let end_val_m: u32 = blob[i] as u32;
            vec.push(Period {
                start: NaiveTime::from_hms(start_val_h, start_val_m, 0),
                end: NaiveTime::from_hms(end_val_h, end_val_m, 0),
            });
        }
        Ok(Periods(vec))
    }
}

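Note (not part of the commit): a minimal sketch of the byte layout the ToSql/FromSql impls above rely on. Each period is packed into four bytes (start hour, start minute, end hour, end minute); decoding walks the blob in steps of four and rebuilds the times with a seconds component of zero.

    // Sketch only: illustrates the 4-byte-per-period encoding used above.
    use chrono::{NaiveTime, Timelike};

    fn main() {
        // A period from 07:00 to 08:30 ...
        let (start, end) = (NaiveTime::from_hms(7, 0, 0), NaiveTime::from_hms(8, 30, 0));

        // ... encodes to exactly four bytes, in hour/minute order.
        let encoded = [
            start.hour() as u8,
            start.minute() as u8,
            end.hour() as u8,
            end.minute() as u8,
        ];
        assert_eq!(encoded, [7, 0, 8, 30]);

        // Decoding reverses the mapping, dropping seconds.
        let decoded = (
            NaiveTime::from_hms(encoded[0] as u32, encoded[1] as u32, 0),
            NaiveTime::from_hms(encoded[2] as u32, encoded[3] as u32, 0),
        );
        assert_eq!(decoded, (start, end));
    }
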
@@ -7,27 +7,27 @@ use crate::types::EmgauwaUid;

#[derive(Debug, Serialize, Identifiable, Queryable)]
pub struct Relay {
    #[serde(skip)]
    pub id: i32,
    // TODO
}

#[derive(Debug, Serialize, Identifiable, Queryable, Clone)]
pub struct Schedule {
    #[serde(skip)]
    pub id: i32,
    #[serde(rename(serialize = "id"))]
    pub uid: EmgauwaUid,
    pub name: String,
    pub periods: Periods,
}

#[derive(Insertable)]
#[table_name = "schedules"]
pub struct NewSchedule<'a> {
    pub uid: &'a EmgauwaUid,
    pub name: &'a str,
    pub periods: &'a Periods,
}

#[derive(Debug, Serialize, Deserialize, AsExpression, FromSqlRow, PartialEq, Clone)]

@@ -36,14 +36,14 @@ pub struct Periods(pub(crate) Vec<Period>);

#[derive(Debug, Serialize, Identifiable, Queryable, Clone)]
pub struct Tag {
    pub id: i32,
    pub tag: String,
}

#[derive(Insertable)]
#[table_name = "tags"]
pub struct NewTag<'a> {
    pub tag: &'a str,
}

#[derive(Queryable, Associations, Identifiable)]

@@ -52,16 +52,16 @@ pub struct NewTag<'a> {
#[belongs_to(Tag)]
#[table_name = "junction_tag"]
pub struct JunctionTag {
    pub id: i32,
    pub tag_id: i32,
    pub relay_id: Option<i32>,
    pub schedule_id: Option<i32>,
}

#[derive(Insertable)]
#[table_name = "junction_tag"]
pub struct NewJunctionTag {
    pub tag_id: i32,
    pub relay_id: Option<i32>,
    pub schedule_id: Option<i32>,
}

@@ -13,129 +13,129 @@ use crate::db::tag::{create_junction_tag, create_tag};
use crate::db::{get_connection, schema};

pub fn get_schedule_tags(schedule: &Schedule) -> Vec<String> {
    let connection = get_connection();
    JunctionTag::belonging_to(schedule)
        .inner_join(schema::tags::dsl::tags)
        .select(schema::tags::tag)
        .load::<String>(&connection)
        .expect("Error loading tags")
}

pub fn get_schedules() -> Vec<Schedule> {
    let connection = get_connection();
    schedules
        .load::<Schedule>(&connection)
        .expect("Error loading schedules")
}

pub fn get_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<Schedule, DatabaseError> {
    let connection = get_connection();
    let result = schedules
        .filter(schema::schedules::uid.eq(filter_uid))
        .first::<Schedule>(&connection)
        .or(Err(DatabaseError::NotFound))?;

    Ok(result)
}

pub fn get_schedules_by_tag(tag: &Tag) -> Vec<Schedule> {
    let connection = get_connection();
    JunctionTag::belonging_to(tag)
        .inner_join(schedules)
        .select(schema::schedules::all_columns)
        .load::<Schedule>(&connection)
        .expect("Error loading tags")
}

pub fn delete_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<(), DatabaseError> {
    let filter_uid = match filter_uid {
        EmgauwaUid::Off => Err(DatabaseError::Protected),
        EmgauwaUid::On => Err(DatabaseError::Protected),
        EmgauwaUid::Any(_) => Ok(filter_uid),
    }?;

    let connection = get_connection();
    match diesel::delete(schedules.filter(schema::schedules::uid.eq(filter_uid)))
        .execute(&connection)
    {
        Ok(rows) => {
            if rows != 0 {
                Ok(())
            } else {
                Err(DatabaseError::DeleteError)
            }
        }
        Err(_) => Err(DatabaseError::DeleteError),
    }
}

pub fn create_schedule(new_name: &str, new_periods: &Periods) -> Result<Schedule, DatabaseError> {
    let connection = get_connection();

    let new_schedule = NewSchedule {
        uid: &EmgauwaUid::default(),
        name: new_name,
        periods: new_periods,
    };

    diesel::insert_into(schedules)
        .values(&new_schedule)
        .execute(&connection)
        .map_err(DatabaseError::InsertError)?;

    let result = schedules
        .find(sql("last_insert_rowid()"))
        .get_result::<Schedule>(&connection)
        .or(Err(DatabaseError::InsertGetError))?;

    Ok(result)
}

pub fn update_schedule(
    schedule: &Schedule,
    new_name: &str,
    new_periods: &Periods,
) -> Result<Schedule, DatabaseError> {
    let connection = get_connection();

    let new_periods = match schedule.uid {
        EmgauwaUid::Off | EmgauwaUid::On => schedule.periods.borrow(),
        EmgauwaUid::Any(_) => new_periods,
    };

    diesel::update(schedule)
        .set((
            schema::schedules::name.eq(new_name),
            schema::schedules::periods.eq(new_periods),
        ))
        .execute(&connection)
        .map_err(DatabaseError::UpdateError)?;

    get_schedule_by_uid(schedule.uid.clone())
}

pub fn set_schedule_tags(schedule: &Schedule, new_tags: &[String]) -> Result<(), DatabaseError> {
    let connection = get_connection();
    diesel::delete(junction_tag.filter(schema::junction_tag::schedule_id.eq(schedule.id)))
        .execute(&connection)
        .or(Err(DatabaseError::DeleteError))?;

    let mut database_tags: Vec<Tag> = tags
        .filter(schema::tags::tag.eq_any(new_tags))
        .load::<Tag>(&connection)
        .expect("Error loading tags");

    // create missing tags
    for new_tag in new_tags {
        if !database_tags.iter().any(|tab_db| tab_db.tag.eq(new_tag)) {
            database_tags.push(create_tag(new_tag).expect("Error inserting tag"));
        }
    }

    for database_tag in database_tags {
        create_junction_tag(database_tag, None, Some(schedule))
            .expect("Error saving junction between tag and schedule");
    }

    Ok(())
}

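Note (not part of the commit): a hypothetical call sequence for the helpers above, from inside a function that returns Result<_, DatabaseError> (the schedule name, periods value, and tag are made up):

    // Sketch only; assumes the db::schedules helpers above are in scope.
    let schedule = create_schedule("morning", &periods)?;          // insert, then re-read via last_insert_rowid()
    set_schedule_tags(&schedule, &[String::from("livingroom")])?;  // replaces the schedule's tag junctions
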
src/db/schema.rs (114 changed lines)
@@ -1,74 +1,74 @@
table! {
    controllers (id) {
        id -> Integer,
        uid -> Text,
        name -> Text,
        ip -> Nullable<Text>,
        port -> Nullable<Integer>,
        relay_count -> Nullable<Integer>,
        active -> Bool,
    }
}

table! {
    junction_relay_schedule (id) {
        id -> Integer,
        weekday -> SmallInt,
        relay_id -> Nullable<Integer>,
        schedule_id -> Nullable<Integer>,
    }
}

table! {
    junction_tag (id) {
        id -> Integer,
        tag_id -> Integer,
        relay_id -> Nullable<Integer>,
        schedule_id -> Nullable<Integer>,
    }
}

table! {
    macro_actions (id) {
        id -> Integer,
        macro_id -> Integer,
        relay_id -> Integer,
        schedule_id -> Integer,
        weekday -> SmallInt,
    }
}

table! {
    macros (id) {
        id -> Integer,
        uid -> Text,
        name -> Text,
    }
}

table! {
    relays (id) {
        id -> Integer,
        name -> Text,
        number -> Integer,
        controller_id -> Integer,
    }
}

table! {
    schedules (id) {
        id -> Integer,
        uid -> Binary,
        name -> Text,
        periods -> Binary,
    }
}

table! {
    tags (id) {
        id -> Integer,
        tag -> Text,
    }
}

joinable!(junction_relay_schedule -> relays (relay_id));

@@ -82,12 +82,12 @@ joinable!(macro_actions -> schedules (schedule_id));
joinable!(relays -> controllers (controller_id));

allow_tables_to_appear_in_same_query!(
    controllers,
    junction_relay_schedule,
    junction_tag,
    macro_actions,
    macros,
    relays,
    schedules,
    tags,
);

@@ -8,56 +8,56 @@ use crate::db::schema::tags::dsl::tags;
use crate::db::{get_connection, schema};

pub fn create_tag(new_tag: &str) -> Result<Tag, DatabaseError> {
    let connection = get_connection();

    let new_tag = NewTag { tag: new_tag };

    diesel::insert_into(tags)
        .values(&new_tag)
        .execute(&connection)
        .map_err(DatabaseError::InsertError)?;

    let result = tags
        .find(sql("last_insert_rowid()"))
        .get_result::<Tag>(&connection)
        .or(Err(DatabaseError::InsertGetError))?;

    Ok(result)
}

pub fn get_tag(target_tag: &str) -> Result<Tag, DatabaseError> {
    let connection = get_connection();

    let result = tags
        .filter(schema::tags::tag.eq(target_tag))
        .first::<Tag>(&connection)
        .or(Err(DatabaseError::NotFound))?;

    Ok(result)
}

pub fn create_junction_tag(
    target_tag: Tag,
    target_relay: Option<&Relay>,
    target_schedule: Option<&Schedule>,
) -> Result<JunctionTag, DatabaseError> {
    let connection = get_connection();

    let new_junction_tag = NewJunctionTag {
        relay_id: target_relay.map(|r| r.id),
        schedule_id: target_schedule.map(|s| s.id),
        tag_id: target_tag.id,
    };

    diesel::insert_into(junction_tag)
        .values(&new_junction_tag)
        .execute(&connection)
        .map_err(DatabaseError::InsertError)?;

    let result = junction_tag
        .find(sql("last_insert_rowid()"))
        .get_result::<JunctionTag>(&connection)
        .or(Err(DatabaseError::InsertGetError))?;

    Ok(result)
}

@@ -5,42 +5,42 @@ use serde::{Serialize, Serializer};

#[derive(Debug)]
pub enum HandlerError {
    BadUid,
    ProtectedSchedule,
}

impl HandlerError {
    fn get_code(&self) -> StatusCode {
        match self {
            HandlerError::BadUid => StatusCode::BAD_REQUEST,
            HandlerError::ProtectedSchedule => StatusCode::FORBIDDEN,
        }
    }
}

impl Serialize for HandlerError {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut s = serializer.serialize_struct("error", 2)?;
        s.serialize_field("code", &self.get_code().as_u16())?;
        s.serialize_field("description", &String::from(self))?;
        s.end()
    }
}

impl From<&HandlerError> for String {
    fn from(err: &HandlerError) -> Self {
        match err {
            HandlerError::BadUid => String::from("the uid is in a bad format"),
            HandlerError::ProtectedSchedule => String::from("the targeted schedule is protected"),
        }
    }
}

impl From<HandlerError> for HttpResponse {
    fn from(err: HandlerError) -> Self {
        HttpResponse::build(err.get_code()).json(err)
    }
}

@@ -6,33 +6,33 @@ pub(crate) mod errors;
pub mod v1;

enum EmgauwaJsonPayLoadError {
    Error(error::JsonPayloadError),
}

impl Serialize for EmgauwaJsonPayLoadError {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut s = serializer.serialize_struct("error", 3)?;
        s.serialize_field("type", "json-payload-error")?;
        s.serialize_field("code", &400)?;
        s.serialize_field(
            "description",
            &match self {
                EmgauwaJsonPayLoadError::Error(err) => format!("{}", err),
            },
        )?;
        s.end()
    }
}

pub fn json_error_handler(err: error::JsonPayloadError, _: &HttpRequest) -> Error {
    error::InternalError::from_response(
        "",
        HttpResponse::BadRequest()
            .content_type("application/json")
            .json(EmgauwaJsonPayLoadError::Error(err)),
    )
    .into()
}

@@ -14,142 +14,142 @@ use crate::utils::vec_has_error;

#[derive(Debug, Serialize, Deserialize)]
pub struct RequestSchedule {
    name: String,
    periods: Periods,
    tags: Vec<String>,
}

#[get("/api/v1/schedules")]
pub async fn index() -> impl Responder {
    let schedules = get_schedules();
    let return_schedules: Vec<ReturnSchedule> =
        schedules.iter().map(ReturnSchedule::from).collect();
    HttpResponse::Ok().json(return_schedules)
}

#[get("/api/v1/schedules/tag/{tag}")]
pub async fn tagged(web::Path((tag,)): web::Path<(String,)>) -> impl Responder {
    let tag_db = get_tag(&tag);
    if tag_db.is_err() {
        return HttpResponse::from(tag_db.unwrap_err());
    }
    let tag_db = tag_db.unwrap();

    let schedules = get_schedules_by_tag(&tag_db);
    let return_schedules: Vec<ReturnSchedule> =
        schedules.iter().map(ReturnSchedule::from).collect();
    HttpResponse::Ok().json(return_schedules)
}

#[get("/api/v1/schedules/{schedule_id}")]
pub async fn show(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Responder {
    let emgauwa_uid = EmgauwaUid::try_from(schedule_uid.as_str()).or(Err(HandlerError::BadUid));

    match emgauwa_uid {
        Ok(uid) => {
            let schedule = get_schedule_by_uid(uid);
            match schedule {
                Ok(ok) => HttpResponse::Ok().json(ReturnSchedule::from(ok)),
                Err(err) => HttpResponse::from(err),
            }
        }
        Err(err) => HttpResponse::from(err),
    }
}

#[post("/api/v1/schedules")]
pub async fn add(data: web::Json<RequestSchedule>) -> impl Responder {
    let new_schedule = create_schedule(&data.name, &data.periods);

    if new_schedule.is_err() {
        return HttpResponse::from(new_schedule.unwrap_err());
    }
    let new_schedule = new_schedule.unwrap();

    let result = set_schedule_tags(&new_schedule, data.tags.as_slice());
    if result.is_err() {
        return HttpResponse::from(result.unwrap_err());
    }

    HttpResponse::Created().json(ReturnSchedule::from(new_schedule))
}

#[post("/api/v1/schedules/list")]
pub async fn add_list(data: web::Json<Vec<RequestSchedule>>) -> impl Responder {
    let result: Vec<Result<Schedule, DatabaseError>> = data
        .as_slice()
        .iter()
        .map(|request_schedule| {
            let new_schedule = create_schedule(&request_schedule.name, &request_schedule.periods)?;

            set_schedule_tags(&new_schedule, request_schedule.tags.as_slice())?;

            Ok(new_schedule)
        })
        .collect();

    match vec_has_error(&result) {
        true => HttpResponse::from(
            result
                .into_iter()
                .find(|r| r.is_err())
                .unwrap()
                .unwrap_err(),
        ),
        false => {
            let return_schedules: Vec<ReturnSchedule> = result
                .iter()
                .map(|s| ReturnSchedule::from(s.as_ref().unwrap()))
                .collect();
            HttpResponse::Created().json(return_schedules)
        }
    }
}

#[put("/api/v1/schedules/{schedule_id}")]
pub async fn update(
    web::Path((schedule_uid,)): web::Path<(String,)>,
    data: web::Json<RequestSchedule>,
) -> impl Responder {
    let emgauwa_uid = EmgauwaUid::try_from(schedule_uid.as_str()).or(Err(HandlerError::BadUid));
    if emgauwa_uid.is_err() {
        return HttpResponse::from(emgauwa_uid.unwrap_err());
    }
    let emgauwa_uid = emgauwa_uid.unwrap();

    let schedule = get_schedule_by_uid(emgauwa_uid);
    if schedule.is_err() {
        return HttpResponse::from(schedule.unwrap_err());
    }
    let schedule = schedule.unwrap();

    let schedule = update_schedule(&schedule, data.name.as_str(), data.periods.borrow());
    if schedule.is_err() {
        return HttpResponse::from(schedule.unwrap_err());
    }
    let schedule = schedule.unwrap();

    let result = set_schedule_tags(&schedule, data.tags.as_slice());
    if result.is_err() {
        return HttpResponse::from(result.unwrap_err());
    }

    HttpResponse::Ok().json(ReturnSchedule::from(schedule))
}

#[delete("/api/v1/schedules/{schedule_id}")]
pub async fn delete(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Responder {
    let emgauwa_uid = EmgauwaUid::try_from(schedule_uid.as_str()).or(Err(HandlerError::BadUid));

    match emgauwa_uid {
        Ok(uid) => match uid {
            EmgauwaUid::Off => HttpResponse::from(HandlerError::ProtectedSchedule),
            EmgauwaUid::On => HttpResponse::from(HandlerError::ProtectedSchedule),
            EmgauwaUid::Any(_) => match delete_schedule_by_uid(uid) {
                Ok(_) => HttpResponse::Ok().json("schedule got deleted"),
                Err(err) => HttpResponse::from(err),
            },
        },
        Err(err) => HttpResponse::from(err),
    }
}

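Note (not part of the commit): given RequestSchedule and the "%H:%M" period format defined earlier, a hypothetical body for POST /api/v1/schedules could look like the following (all values are made up):

    {
        "name": "morning",
        "periods": [{"start": "07:00", "end": "08:30"}],
        "tags": ["livingroom"]
    }
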
src/main.rs (56 changed lines)
@@ -18,36 +18,36 @@ mod utils;

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    db::run_migrations();

    Builder::from_env(Env::default().default_filter_or("info")).init();

    let pi = wiringpi::setup();

    //Use WiringPi pin 0 as output
    let pin = pi.output_pin(0);
    pin.digital_write(High);

    HttpServer::new(|| {
        App::new()
            .wrap(
                middleware::DefaultHeaders::new()
                    .header("Access-Control-Allow-Origin", "*")
                    .header("Access-Control-Allow-Headers", "*")
                    .header("Access-Control-Allow-Methods", "*"),
            )
            .wrap(middleware::Logger::default())
            .wrap(middleware::NormalizePath::new(TrailingSlash::Trim))
            .app_data(web::JsonConfig::default().error_handler(handlers::json_error_handler))
            .service(handlers::v1::schedules::index)
            .service(handlers::v1::schedules::tagged)
            .service(handlers::v1::schedules::show)
            .service(handlers::v1::schedules::add)
            .service(handlers::v1::schedules::add_list)
            .service(handlers::v1::schedules::update)
            .service(handlers::v1::schedules::delete)
    })
    .bind("127.0.0.1:5000")?
    .run()
    .await
}

@@ -5,20 +5,20 @@ use crate::db::schedules::get_schedule_tags;

#[derive(Debug, Serialize)]
pub struct ReturnSchedule {
    #[serde(flatten)]
    pub schedule: Schedule,
    pub tags: Vec<String>,
}

impl From<Schedule> for ReturnSchedule {
    fn from(schedule: Schedule) -> Self {
        let tags: Vec<String> = get_schedule_tags(&schedule);
        ReturnSchedule { schedule, tags }
    }
}

impl From<&Schedule> for ReturnSchedule {
    fn from(schedule: &Schedule) -> Self {
        ReturnSchedule::from(schedule.clone())
    }
}

src/types.rs (150 changed lines)
@@ -15,110 +15,110 @@ use uuid::Uuid;
#[derive(AsExpression, FromSqlRow, PartialEq, Clone)]
#[sql_type = "Binary"]
pub enum EmgauwaUid {
    Off,
    On,
    Any(Uuid),
}

impl EmgauwaUid {
    const OFF_STR: &'static str = "off";
    const ON_STR: &'static str = "on";
    const OFF_U8: u8 = 0;
    const ON_U8: u8 = 1;
    const OFF_U128: u128 = 0;
    const ON_U128: u128 = 1;
}

impl Default for EmgauwaUid {
    fn default() -> Self {
        EmgauwaUid::Any(Uuid::new_v4())
    }
}

impl Debug for EmgauwaUid {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            EmgauwaUid::Off => EmgauwaUid::OFF_STR.fmt(f),
            EmgauwaUid::On => EmgauwaUid::ON_STR.fmt(f),
            EmgauwaUid::Any(value) => value.fmt(f),
        }
    }
}

impl ToSql<Binary, Sqlite> for EmgauwaUid {
    fn to_sql<W: Write>(&self, out: &mut Output<W, Sqlite>) -> serialize::Result {
        match self {
            EmgauwaUid::Off => out.write_all(&[EmgauwaUid::OFF_U8])?,
            EmgauwaUid::On => out.write_all(&[EmgauwaUid::ON_U8])?,
            EmgauwaUid::Any(value) => out.write_all(value.as_bytes())?,
        }
        Ok(IsNull::No)
    }
}

impl FromSql<Binary, Sqlite> for EmgauwaUid {
    fn from_sql(bytes: Option<&<Sqlite as Backend>::RawValue>) -> deserialize::Result<Self> {
        match bytes {
            None => Ok(EmgauwaUid::default()),
            Some(value) => match value.read_blob() {
                [EmgauwaUid::OFF_U8] => Ok(EmgauwaUid::Off),
                [EmgauwaUid::ON_U8] => Ok(EmgauwaUid::On),
                value_bytes => Ok(EmgauwaUid::Any(Uuid::from_slice(value_bytes).unwrap())),
            },
        }
    }
}

impl Serialize for EmgauwaUid {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        String::from(self).serialize(serializer)
    }
}

impl From<Uuid> for EmgauwaUid {
    fn from(uid: Uuid) -> EmgauwaUid {
        match uid.as_u128() {
            EmgauwaUid::OFF_U128 => EmgauwaUid::Off,
            EmgauwaUid::ON_U128 => EmgauwaUid::On,
            _ => EmgauwaUid::Any(uid),
        }
    }
}

impl TryFrom<&str> for EmgauwaUid {
    type Error = uuid::Error;

    fn try_from(value: &str) -> Result<Self, Self::Error> {
        match value {
            EmgauwaUid::OFF_STR => Ok(EmgauwaUid::Off),
            EmgauwaUid::ON_STR => Ok(EmgauwaUid::On),
            any => match Uuid::from_str(any) {
                Ok(uuid) => Ok(EmgauwaUid::Any(uuid)),
                Err(err) => Err(err),
            },
        }
    }
}

impl From<&EmgauwaUid> for Uuid {
    fn from(emgauwa_uid: &EmgauwaUid) -> Uuid {
        match emgauwa_uid {
            EmgauwaUid::Off => uuid::Uuid::from_u128(EmgauwaUid::OFF_U128),
            EmgauwaUid::On => uuid::Uuid::from_u128(EmgauwaUid::ON_U128),
            EmgauwaUid::Any(value) => *value,
        }
    }
}

impl From<&EmgauwaUid> for String {
    fn from(emgauwa_uid: &EmgauwaUid) -> String {
        match emgauwa_uid {
            EmgauwaUid::Off => String::from(EmgauwaUid::OFF_STR),
            EmgauwaUid::On => String::from(EmgauwaUid::ON_STR),
            EmgauwaUid::Any(value) => value.to_hyphenated().to_string(),
        }
    }
}

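Note (not part of the commit): the impls above reserve two UUID values for the built-in schedules. The string "off" corresponds to the UUID with u128 value 0 and "on" to u128 value 1; any other value round-trips as a regular hyphenated UUID. A small sketch of that mapping:

    // Sketch only: the reserved uuid values behind EmgauwaUid::Off and EmgauwaUid::On.
    use uuid::Uuid;

    fn main() {
        let off = Uuid::from_u128(0);
        let on = Uuid::from_u128(1);
        assert_eq!(
            off.to_hyphenated().to_string(),
            "00000000-0000-0000-0000-000000000000"
        );
        assert_eq!(
            on.to_hyphenated().to_string(),
            "00000000-0000-0000-0000-000000000001"
        );
    }
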
@@ -1,3 +1,3 @@
pub fn vec_has_error<T, E>(target: &[Result<T, E>]) -> bool {
    target.iter().any(|t| t.is_err())
}