Switch spaces to tabs

This commit is contained in:
Tobias Reisinger 2022-07-20 00:38:09 +02:00
parent 07aca5293e
commit 4261141c3a
17 changed files with 560 additions and 557 deletions

View file

@ -8,8 +8,9 @@ root = true
[*] [*]
end_of_line = lf end_of_line = lf
insert_final_newline = true insert_final_newline = true
indent_style = space indent_style = tab
indent_size = 4 indent_size = 4
trim_trailing_whitespace = true
[*.yml] [*.yml]
indent_size = 2 indent_size = 2

2
.rustfmt.toml Normal file
View file

@ -0,0 +1,2 @@
newline_style = "Unix"
hard_tabs = true

View file

@ -1,4 +1,4 @@
fn main() { fn main() {
#[cfg(any(target_arch = "arm", target_arch = "aarch64"))] #[cfg(any(target_arch = "arm", target_arch = "aarch64"))]
println!("cargo:rustc-link-lib=dylib=wiringPi"); println!("cargo:rustc-link-lib=dylib=wiringPi");
} }

View file

@ -15,14 +15,14 @@ mod model_utils;
embed_migrations!("migrations"); embed_migrations!("migrations");
fn get_connection() -> SqliteConnection { fn get_connection() -> SqliteConnection {
dotenv().ok(); dotenv().ok();
let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set"); let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
SqliteConnection::establish(&database_url) SqliteConnection::establish(&database_url)
.unwrap_or_else(|_| panic!("Error connecting to {}", database_url)) .unwrap_or_else(|_| panic!("Error connecting to {}", database_url))
} }
pub fn run_migrations() { pub fn run_migrations() {
let connection = get_connection(); let connection = get_connection();
embedded_migrations::run(&connection).expect("Failed to run migrations."); embedded_migrations::run(&connection).expect("Failed to run migrations.");
} }

View file

@ -5,54 +5,54 @@ use serde::{Serialize, Serializer};
#[derive(Debug)] #[derive(Debug)]
pub enum DatabaseError { pub enum DatabaseError {
DeleteError, DeleteError,
InsertError(diesel::result::Error), InsertError(diesel::result::Error),
InsertGetError, InsertGetError,
NotFound, NotFound,
Protected, Protected,
UpdateError(diesel::result::Error), UpdateError(diesel::result::Error),
} }
impl DatabaseError { impl DatabaseError {
fn get_code(&self) -> StatusCode { fn get_code(&self) -> StatusCode {
match self { match self {
DatabaseError::NotFound => StatusCode::NOT_FOUND, DatabaseError::NotFound => StatusCode::NOT_FOUND,
DatabaseError::Protected => StatusCode::FORBIDDEN, DatabaseError::Protected => StatusCode::FORBIDDEN,
_ => StatusCode::INTERNAL_SERVER_ERROR, _ => StatusCode::INTERNAL_SERVER_ERROR,
} }
} }
} }
impl Serialize for DatabaseError { impl Serialize for DatabaseError {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where where
S: Serializer, S: Serializer,
{ {
let mut s = serializer.serialize_struct("error", 3)?; let mut s = serializer.serialize_struct("error", 3)?;
s.serialize_field("type", "database-error")?; s.serialize_field("type", "database-error")?;
s.serialize_field("code", &self.get_code().as_u16())?; s.serialize_field("code", &self.get_code().as_u16())?;
s.serialize_field("description", &String::from(self))?; s.serialize_field("description", &String::from(self))?;
s.end() s.end()
} }
} }
impl From<&DatabaseError> for String { impl From<&DatabaseError> for String {
fn from(err: &DatabaseError) -> Self { fn from(err: &DatabaseError) -> Self {
match err { match err {
DatabaseError::InsertError(_) => String::from("error on inserting into database"), DatabaseError::InsertError(_) => String::from("error on inserting into database"),
DatabaseError::InsertGetError => { DatabaseError::InsertGetError => {
String::from("error on retrieving new entry from database (your entry was saved)") String::from("error on retrieving new entry from database (your entry was saved)")
} }
DatabaseError::NotFound => String::from("model was not found in database"), DatabaseError::NotFound => String::from("model was not found in database"),
DatabaseError::DeleteError => String::from("error on deleting from database"), DatabaseError::DeleteError => String::from("error on deleting from database"),
DatabaseError::Protected => String::from("model is protected"), DatabaseError::Protected => String::from("model is protected"),
DatabaseError::UpdateError(_) => String::from("error on updating the model"), DatabaseError::UpdateError(_) => String::from("error on updating the model"),
} }
} }
} }
impl From<DatabaseError> for HttpResponse { impl From<DatabaseError> for HttpResponse {
fn from(err: DatabaseError) -> Self { fn from(err: DatabaseError) -> Self {
HttpResponse::build(err.get_code()).json(err) HttpResponse::build(err.get_code()).json(err)
} }
} }

View file

@ -12,64 +12,64 @@ use std::io::Write;
#[derive(Debug, Serialize, Deserialize, AsExpression, FromSqlRow, PartialEq, Clone)] #[derive(Debug, Serialize, Deserialize, AsExpression, FromSqlRow, PartialEq, Clone)]
#[sql_type = "Binary"] #[sql_type = "Binary"]
pub struct Period { pub struct Period {
#[serde(with = "period_format")] #[serde(with = "period_format")]
pub start: NaiveTime, pub start: NaiveTime,
#[serde(with = "period_format")] #[serde(with = "period_format")]
pub end: NaiveTime, pub end: NaiveTime,
} }
mod period_format { mod period_format {
use chrono::NaiveTime; use chrono::NaiveTime;
use serde::{self, Deserialize, Deserializer, Serializer}; use serde::{self, Deserialize, Deserializer, Serializer};
const FORMAT: &str = "%H:%M"; const FORMAT: &str = "%H:%M";
pub fn serialize<S>(time: &NaiveTime, serializer: S) -> Result<S::Ok, S::Error> pub fn serialize<S>(time: &NaiveTime, serializer: S) -> Result<S::Ok, S::Error>
where where
S: Serializer, S: Serializer,
{ {
let s = format!("{}", time.format(FORMAT)); let s = format!("{}", time.format(FORMAT));
serializer.serialize_str(&s) serializer.serialize_str(&s)
} }
pub fn deserialize<'de, D>(deserializer: D) -> Result<NaiveTime, D::Error> pub fn deserialize<'de, D>(deserializer: D) -> Result<NaiveTime, D::Error>
where where
D: Deserializer<'de>, D: Deserializer<'de>,
{ {
let s = String::deserialize(deserializer)?; let s = String::deserialize(deserializer)?;
NaiveTime::parse_from_str(&s, FORMAT).map_err(serde::de::Error::custom) NaiveTime::parse_from_str(&s, FORMAT).map_err(serde::de::Error::custom)
} }
} }
impl ToSql<Binary, Sqlite> for Periods { impl ToSql<Binary, Sqlite> for Periods {
fn to_sql<W: Write>(&self, out: &mut Output<W, Sqlite>) -> serialize::Result { fn to_sql<W: Write>(&self, out: &mut Output<W, Sqlite>) -> serialize::Result {
for period in self.0.iter() { for period in self.0.iter() {
out.write_all(&[ out.write_all(&[
period.start.hour() as u8, period.start.hour() as u8,
period.start.minute() as u8, period.start.minute() as u8,
period.end.hour() as u8, period.end.hour() as u8,
period.end.minute() as u8, period.end.minute() as u8,
])?; ])?;
} }
Ok(IsNull::No) Ok(IsNull::No)
} }
} }
impl FromSql<Binary, Sqlite> for Periods { impl FromSql<Binary, Sqlite> for Periods {
fn from_sql(bytes: Option<&<Sqlite as Backend>::RawValue>) -> deserialize::Result<Self> { fn from_sql(bytes: Option<&<Sqlite as Backend>::RawValue>) -> deserialize::Result<Self> {
let blob = bytes.unwrap().read_blob(); let blob = bytes.unwrap().read_blob();
let mut vec = Vec::new(); let mut vec = Vec::new();
for i in (3..blob.len()).step_by(4) { for i in (3..blob.len()).step_by(4) {
let start_val_h: u32 = blob[i - 3] as u32; let start_val_h: u32 = blob[i - 3] as u32;
let start_val_m: u32 = blob[i - 2] as u32; let start_val_m: u32 = blob[i - 2] as u32;
let end_val_h: u32 = blob[i - 1] as u32; let end_val_h: u32 = blob[i - 1] as u32;
let end_val_m: u32 = blob[i] as u32; let end_val_m: u32 = blob[i] as u32;
vec.push(Period { vec.push(Period {
start: NaiveTime::from_hms(start_val_h, start_val_m, 0), start: NaiveTime::from_hms(start_val_h, start_val_m, 0),
end: NaiveTime::from_hms(end_val_h, end_val_m, 0), end: NaiveTime::from_hms(end_val_h, end_val_m, 0),
}); });
} }
Ok(Periods(vec)) Ok(Periods(vec))
} }
} }

View file

@ -7,27 +7,27 @@ use crate::types::EmgauwaUid;
#[derive(Debug, Serialize, Identifiable, Queryable)] #[derive(Debug, Serialize, Identifiable, Queryable)]
pub struct Relay { pub struct Relay {
#[serde(skip)] #[serde(skip)]
pub id: i32, pub id: i32,
// TODO // TODO
} }
#[derive(Debug, Serialize, Identifiable, Queryable, Clone)] #[derive(Debug, Serialize, Identifiable, Queryable, Clone)]
pub struct Schedule { pub struct Schedule {
#[serde(skip)] #[serde(skip)]
pub id: i32, pub id: i32,
#[serde(rename(serialize = "id"))] #[serde(rename(serialize = "id"))]
pub uid: EmgauwaUid, pub uid: EmgauwaUid,
pub name: String, pub name: String,
pub periods: Periods, pub periods: Periods,
} }
#[derive(Insertable)] #[derive(Insertable)]
#[table_name = "schedules"] #[table_name = "schedules"]
pub struct NewSchedule<'a> { pub struct NewSchedule<'a> {
pub uid: &'a EmgauwaUid, pub uid: &'a EmgauwaUid,
pub name: &'a str, pub name: &'a str,
pub periods: &'a Periods, pub periods: &'a Periods,
} }
#[derive(Debug, Serialize, Deserialize, AsExpression, FromSqlRow, PartialEq, Clone)] #[derive(Debug, Serialize, Deserialize, AsExpression, FromSqlRow, PartialEq, Clone)]
@ -36,14 +36,14 @@ pub struct Periods(pub(crate) Vec<Period>);
#[derive(Debug, Serialize, Identifiable, Queryable, Clone)] #[derive(Debug, Serialize, Identifiable, Queryable, Clone)]
pub struct Tag { pub struct Tag {
pub id: i32, pub id: i32,
pub tag: String, pub tag: String,
} }
#[derive(Insertable)] #[derive(Insertable)]
#[table_name = "tags"] #[table_name = "tags"]
pub struct NewTag<'a> { pub struct NewTag<'a> {
pub tag: &'a str, pub tag: &'a str,
} }
#[derive(Queryable, Associations, Identifiable)] #[derive(Queryable, Associations, Identifiable)]
@ -52,16 +52,16 @@ pub struct NewTag<'a> {
#[belongs_to(Tag)] #[belongs_to(Tag)]
#[table_name = "junction_tag"] #[table_name = "junction_tag"]
pub struct JunctionTag { pub struct JunctionTag {
pub id: i32, pub id: i32,
pub tag_id: i32, pub tag_id: i32,
pub relay_id: Option<i32>, pub relay_id: Option<i32>,
pub schedule_id: Option<i32>, pub schedule_id: Option<i32>,
} }
#[derive(Insertable)] #[derive(Insertable)]
#[table_name = "junction_tag"] #[table_name = "junction_tag"]
pub struct NewJunctionTag { pub struct NewJunctionTag {
pub tag_id: i32, pub tag_id: i32,
pub relay_id: Option<i32>, pub relay_id: Option<i32>,
pub schedule_id: Option<i32>, pub schedule_id: Option<i32>,
} }

View file

@ -13,129 +13,129 @@ use crate::db::tag::{create_junction_tag, create_tag};
use crate::db::{get_connection, schema}; use crate::db::{get_connection, schema};
pub fn get_schedule_tags(schedule: &Schedule) -> Vec<String> { pub fn get_schedule_tags(schedule: &Schedule) -> Vec<String> {
let connection = get_connection(); let connection = get_connection();
JunctionTag::belonging_to(schedule) JunctionTag::belonging_to(schedule)
.inner_join(schema::tags::dsl::tags) .inner_join(schema::tags::dsl::tags)
.select(schema::tags::tag) .select(schema::tags::tag)
.load::<String>(&connection) .load::<String>(&connection)
.expect("Error loading tags") .expect("Error loading tags")
} }
pub fn get_schedules() -> Vec<Schedule> { pub fn get_schedules() -> Vec<Schedule> {
let connection = get_connection(); let connection = get_connection();
schedules schedules
.load::<Schedule>(&connection) .load::<Schedule>(&connection)
.expect("Error loading schedules") .expect("Error loading schedules")
} }
pub fn get_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<Schedule, DatabaseError> { pub fn get_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<Schedule, DatabaseError> {
let connection = get_connection(); let connection = get_connection();
let result = schedules let result = schedules
.filter(schema::schedules::uid.eq(filter_uid)) .filter(schema::schedules::uid.eq(filter_uid))
.first::<Schedule>(&connection) .first::<Schedule>(&connection)
.or(Err(DatabaseError::NotFound))?; .or(Err(DatabaseError::NotFound))?;
Ok(result) Ok(result)
} }
pub fn get_schedules_by_tag(tag: &Tag) -> Vec<Schedule> { pub fn get_schedules_by_tag(tag: &Tag) -> Vec<Schedule> {
let connection = get_connection(); let connection = get_connection();
JunctionTag::belonging_to(tag) JunctionTag::belonging_to(tag)
.inner_join(schedules) .inner_join(schedules)
.select(schema::schedules::all_columns) .select(schema::schedules::all_columns)
.load::<Schedule>(&connection) .load::<Schedule>(&connection)
.expect("Error loading tags") .expect("Error loading tags")
} }
pub fn delete_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<(), DatabaseError> { pub fn delete_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<(), DatabaseError> {
let filter_uid = match filter_uid { let filter_uid = match filter_uid {
EmgauwaUid::Off => Err(DatabaseError::Protected), EmgauwaUid::Off => Err(DatabaseError::Protected),
EmgauwaUid::On => Err(DatabaseError::Protected), EmgauwaUid::On => Err(DatabaseError::Protected),
EmgauwaUid::Any(_) => Ok(filter_uid), EmgauwaUid::Any(_) => Ok(filter_uid),
}?; }?;
let connection = get_connection(); let connection = get_connection();
match diesel::delete(schedules.filter(schema::schedules::uid.eq(filter_uid))) match diesel::delete(schedules.filter(schema::schedules::uid.eq(filter_uid)))
.execute(&connection) .execute(&connection)
{ {
Ok(rows) => { Ok(rows) => {
if rows != 0 { if rows != 0 {
Ok(()) Ok(())
} else { } else {
Err(DatabaseError::DeleteError) Err(DatabaseError::DeleteError)
} }
} }
Err(_) => Err(DatabaseError::DeleteError), Err(_) => Err(DatabaseError::DeleteError),
} }
} }
pub fn create_schedule(new_name: &str, new_periods: &Periods) -> Result<Schedule, DatabaseError> { pub fn create_schedule(new_name: &str, new_periods: &Periods) -> Result<Schedule, DatabaseError> {
let connection = get_connection(); let connection = get_connection();
let new_schedule = NewSchedule { let new_schedule = NewSchedule {
uid: &EmgauwaUid::default(), uid: &EmgauwaUid::default(),
name: new_name, name: new_name,
periods: new_periods, periods: new_periods,
}; };
diesel::insert_into(schedules) diesel::insert_into(schedules)
.values(&new_schedule) .values(&new_schedule)
.execute(&connection) .execute(&connection)
.map_err(DatabaseError::InsertError)?; .map_err(DatabaseError::InsertError)?;
let result = schedules let result = schedules
.find(sql("last_insert_rowid()")) .find(sql("last_insert_rowid()"))
.get_result::<Schedule>(&connection) .get_result::<Schedule>(&connection)
.or(Err(DatabaseError::InsertGetError))?; .or(Err(DatabaseError::InsertGetError))?;
Ok(result) Ok(result)
} }
pub fn update_schedule( pub fn update_schedule(
schedule: &Schedule, schedule: &Schedule,
new_name: &str, new_name: &str,
new_periods: &Periods, new_periods: &Periods,
) -> Result<Schedule, DatabaseError> { ) -> Result<Schedule, DatabaseError> {
let connection = get_connection(); let connection = get_connection();
let new_periods = match schedule.uid { let new_periods = match schedule.uid {
EmgauwaUid::Off | EmgauwaUid::On => schedule.periods.borrow(), EmgauwaUid::Off | EmgauwaUid::On => schedule.periods.borrow(),
EmgauwaUid::Any(_) => new_periods, EmgauwaUid::Any(_) => new_periods,
}; };
diesel::update(schedule) diesel::update(schedule)
.set(( .set((
schema::schedules::name.eq(new_name), schema::schedules::name.eq(new_name),
schema::schedules::periods.eq(new_periods), schema::schedules::periods.eq(new_periods),
)) ))
.execute(&connection) .execute(&connection)
.map_err(DatabaseError::UpdateError)?; .map_err(DatabaseError::UpdateError)?;
get_schedule_by_uid(schedule.uid.clone()) get_schedule_by_uid(schedule.uid.clone())
} }
pub fn set_schedule_tags(schedule: &Schedule, new_tags: &[String]) -> Result<(), DatabaseError> { pub fn set_schedule_tags(schedule: &Schedule, new_tags: &[String]) -> Result<(), DatabaseError> {
let connection = get_connection(); let connection = get_connection();
diesel::delete(junction_tag.filter(schema::junction_tag::schedule_id.eq(schedule.id))) diesel::delete(junction_tag.filter(schema::junction_tag::schedule_id.eq(schedule.id)))
.execute(&connection) .execute(&connection)
.or(Err(DatabaseError::DeleteError))?; .or(Err(DatabaseError::DeleteError))?;
let mut database_tags: Vec<Tag> = tags let mut database_tags: Vec<Tag> = tags
.filter(schema::tags::tag.eq_any(new_tags)) .filter(schema::tags::tag.eq_any(new_tags))
.load::<Tag>(&connection) .load::<Tag>(&connection)
.expect("Error loading tags"); .expect("Error loading tags");
// create missing tags // create missing tags
for new_tag in new_tags { for new_tag in new_tags {
if !database_tags.iter().any(|tab_db| tab_db.tag.eq(new_tag)) { if !database_tags.iter().any(|tab_db| tab_db.tag.eq(new_tag)) {
database_tags.push(create_tag(new_tag).expect("Error inserting tag")); database_tags.push(create_tag(new_tag).expect("Error inserting tag"));
} }
} }
for database_tag in database_tags { for database_tag in database_tags {
create_junction_tag(database_tag, None, Some(schedule)) create_junction_tag(database_tag, None, Some(schedule))
.expect("Error saving junction between tag and schedule"); .expect("Error saving junction between tag and schedule");
} }
Ok(()) Ok(())
} }

View file

@ -1,74 +1,74 @@
table! { table! {
controllers (id) { controllers (id) {
id -> Integer, id -> Integer,
uid -> Text, uid -> Text,
name -> Text, name -> Text,
ip -> Nullable<Text>, ip -> Nullable<Text>,
port -> Nullable<Integer>, port -> Nullable<Integer>,
relay_count -> Nullable<Integer>, relay_count -> Nullable<Integer>,
active -> Bool, active -> Bool,
} }
} }
table! { table! {
junction_relay_schedule (id) { junction_relay_schedule (id) {
id -> Integer, id -> Integer,
weekday -> SmallInt, weekday -> SmallInt,
relay_id -> Nullable<Integer>, relay_id -> Nullable<Integer>,
schedule_id -> Nullable<Integer>, schedule_id -> Nullable<Integer>,
} }
} }
table! { table! {
junction_tag (id) { junction_tag (id) {
id -> Integer, id -> Integer,
tag_id -> Integer, tag_id -> Integer,
relay_id -> Nullable<Integer>, relay_id -> Nullable<Integer>,
schedule_id -> Nullable<Integer>, schedule_id -> Nullable<Integer>,
} }
} }
table! { table! {
macro_actions (id) { macro_actions (id) {
id -> Integer, id -> Integer,
macro_id -> Integer, macro_id -> Integer,
relay_id -> Integer, relay_id -> Integer,
schedule_id -> Integer, schedule_id -> Integer,
weekday -> SmallInt, weekday -> SmallInt,
} }
} }
table! { table! {
macros (id) { macros (id) {
id -> Integer, id -> Integer,
uid -> Text, uid -> Text,
name -> Text, name -> Text,
} }
} }
table! { table! {
relays (id) { relays (id) {
id -> Integer, id -> Integer,
name -> Text, name -> Text,
number -> Integer, number -> Integer,
controller_id -> Integer, controller_id -> Integer,
} }
} }
table! { table! {
schedules (id) { schedules (id) {
id -> Integer, id -> Integer,
uid -> Binary, uid -> Binary,
name -> Text, name -> Text,
periods -> Binary, periods -> Binary,
} }
} }
table! { table! {
tags (id) { tags (id) {
id -> Integer, id -> Integer,
tag -> Text, tag -> Text,
} }
} }
joinable!(junction_relay_schedule -> relays (relay_id)); joinable!(junction_relay_schedule -> relays (relay_id));
@ -82,12 +82,12 @@ joinable!(macro_actions -> schedules (schedule_id));
joinable!(relays -> controllers (controller_id)); joinable!(relays -> controllers (controller_id));
allow_tables_to_appear_in_same_query!( allow_tables_to_appear_in_same_query!(
controllers, controllers,
junction_relay_schedule, junction_relay_schedule,
junction_tag, junction_tag,
macro_actions, macro_actions,
macros, macros,
relays, relays,
schedules, schedules,
tags, tags,
); );

View file

@ -8,56 +8,56 @@ use crate::db::schema::tags::dsl::tags;
use crate::db::{get_connection, schema}; use crate::db::{get_connection, schema};
pub fn create_tag(new_tag: &str) -> Result<Tag, DatabaseError> { pub fn create_tag(new_tag: &str) -> Result<Tag, DatabaseError> {
let connection = get_connection(); let connection = get_connection();
let new_tag = NewTag { tag: new_tag }; let new_tag = NewTag { tag: new_tag };
diesel::insert_into(tags) diesel::insert_into(tags)
.values(&new_tag) .values(&new_tag)
.execute(&connection) .execute(&connection)
.map_err(DatabaseError::InsertError)?; .map_err(DatabaseError::InsertError)?;
let result = tags let result = tags
.find(sql("last_insert_rowid()")) .find(sql("last_insert_rowid()"))
.get_result::<Tag>(&connection) .get_result::<Tag>(&connection)
.or(Err(DatabaseError::InsertGetError))?; .or(Err(DatabaseError::InsertGetError))?;
Ok(result) Ok(result)
} }
pub fn get_tag(target_tag: &str) -> Result<Tag, DatabaseError> { pub fn get_tag(target_tag: &str) -> Result<Tag, DatabaseError> {
let connection = get_connection(); let connection = get_connection();
let result = tags let result = tags
.filter(schema::tags::tag.eq(target_tag)) .filter(schema::tags::tag.eq(target_tag))
.first::<Tag>(&connection) .first::<Tag>(&connection)
.or(Err(DatabaseError::NotFound))?; .or(Err(DatabaseError::NotFound))?;
Ok(result) Ok(result)
} }
pub fn create_junction_tag( pub fn create_junction_tag(
target_tag: Tag, target_tag: Tag,
target_relay: Option<&Relay>, target_relay: Option<&Relay>,
target_schedule: Option<&Schedule>, target_schedule: Option<&Schedule>,
) -> Result<JunctionTag, DatabaseError> { ) -> Result<JunctionTag, DatabaseError> {
let connection = get_connection(); let connection = get_connection();
let new_junction_tag = NewJunctionTag { let new_junction_tag = NewJunctionTag {
relay_id: target_relay.map(|r| r.id), relay_id: target_relay.map(|r| r.id),
schedule_id: target_schedule.map(|s| s.id), schedule_id: target_schedule.map(|s| s.id),
tag_id: target_tag.id, tag_id: target_tag.id,
}; };
diesel::insert_into(junction_tag) diesel::insert_into(junction_tag)
.values(&new_junction_tag) .values(&new_junction_tag)
.execute(&connection) .execute(&connection)
.map_err(DatabaseError::InsertError)?; .map_err(DatabaseError::InsertError)?;
let result = junction_tag let result = junction_tag
.find(sql("last_insert_rowid()")) .find(sql("last_insert_rowid()"))
.get_result::<JunctionTag>(&connection) .get_result::<JunctionTag>(&connection)
.or(Err(DatabaseError::InsertGetError))?; .or(Err(DatabaseError::InsertGetError))?;
Ok(result) Ok(result)
} }

View file

@ -5,42 +5,42 @@ use serde::{Serialize, Serializer};
#[derive(Debug)] #[derive(Debug)]
pub enum HandlerError { pub enum HandlerError {
BadUid, BadUid,
ProtectedSchedule, ProtectedSchedule,
} }
impl HandlerError { impl HandlerError {
fn get_code(&self) -> StatusCode { fn get_code(&self) -> StatusCode {
match self { match self {
HandlerError::BadUid => StatusCode::BAD_REQUEST, HandlerError::BadUid => StatusCode::BAD_REQUEST,
HandlerError::ProtectedSchedule => StatusCode::FORBIDDEN, HandlerError::ProtectedSchedule => StatusCode::FORBIDDEN,
} }
} }
} }
impl Serialize for HandlerError { impl Serialize for HandlerError {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where where
S: Serializer, S: Serializer,
{ {
let mut s = serializer.serialize_struct("error", 2)?; let mut s = serializer.serialize_struct("error", 2)?;
s.serialize_field("code", &self.get_code().as_u16())?; s.serialize_field("code", &self.get_code().as_u16())?;
s.serialize_field("description", &String::from(self))?; s.serialize_field("description", &String::from(self))?;
s.end() s.end()
} }
} }
impl From<&HandlerError> for String { impl From<&HandlerError> for String {
fn from(err: &HandlerError) -> Self { fn from(err: &HandlerError) -> Self {
match err { match err {
HandlerError::BadUid => String::from("the uid is in a bad format"), HandlerError::BadUid => String::from("the uid is in a bad format"),
HandlerError::ProtectedSchedule => String::from("the targeted schedule is protected"), HandlerError::ProtectedSchedule => String::from("the targeted schedule is protected"),
} }
} }
} }
impl From<HandlerError> for HttpResponse { impl From<HandlerError> for HttpResponse {
fn from(err: HandlerError) -> Self { fn from(err: HandlerError) -> Self {
HttpResponse::build(err.get_code()).json(err) HttpResponse::build(err.get_code()).json(err)
} }
} }

View file

@ -6,33 +6,33 @@ pub(crate) mod errors;
pub mod v1; pub mod v1;
enum EmgauwaJsonPayLoadError { enum EmgauwaJsonPayLoadError {
Error(error::JsonPayloadError), Error(error::JsonPayloadError),
} }
impl Serialize for EmgauwaJsonPayLoadError { impl Serialize for EmgauwaJsonPayLoadError {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where where
S: Serializer, S: Serializer,
{ {
let mut s = serializer.serialize_struct("error", 3)?; let mut s = serializer.serialize_struct("error", 3)?;
s.serialize_field("type", "json-payload-error")?; s.serialize_field("type", "json-payload-error")?;
s.serialize_field("code", &400)?; s.serialize_field("code", &400)?;
s.serialize_field( s.serialize_field(
"description", "description",
&match self { &match self {
EmgauwaJsonPayLoadError::Error(err) => format!("{}", err), EmgauwaJsonPayLoadError::Error(err) => format!("{}", err),
}, },
)?; )?;
s.end() s.end()
} }
} }
pub fn json_error_handler(err: error::JsonPayloadError, _: &HttpRequest) -> Error { pub fn json_error_handler(err: error::JsonPayloadError, _: &HttpRequest) -> Error {
error::InternalError::from_response( error::InternalError::from_response(
"", "",
HttpResponse::BadRequest() HttpResponse::BadRequest()
.content_type("application/json") .content_type("application/json")
.json(EmgauwaJsonPayLoadError::Error(err)), .json(EmgauwaJsonPayLoadError::Error(err)),
) )
.into() .into()
} }

View file

@ -14,142 +14,142 @@ use crate::utils::vec_has_error;
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub struct RequestSchedule { pub struct RequestSchedule {
name: String, name: String,
periods: Periods, periods: Periods,
tags: Vec<String>, tags: Vec<String>,
} }
#[get("/api/v1/schedules")] #[get("/api/v1/schedules")]
pub async fn index() -> impl Responder { pub async fn index() -> impl Responder {
let schedules = get_schedules(); let schedules = get_schedules();
let return_schedules: Vec<ReturnSchedule> = let return_schedules: Vec<ReturnSchedule> =
schedules.iter().map(ReturnSchedule::from).collect(); schedules.iter().map(ReturnSchedule::from).collect();
HttpResponse::Ok().json(return_schedules) HttpResponse::Ok().json(return_schedules)
} }
#[get("/api/v1/schedules/tag/{tag}")] #[get("/api/v1/schedules/tag/{tag}")]
pub async fn tagged(web::Path((tag,)): web::Path<(String,)>) -> impl Responder { pub async fn tagged(web::Path((tag,)): web::Path<(String,)>) -> impl Responder {
let tag_db = get_tag(&tag); let tag_db = get_tag(&tag);
if tag_db.is_err() { if tag_db.is_err() {
return HttpResponse::from(tag_db.unwrap_err()); return HttpResponse::from(tag_db.unwrap_err());
} }
let tag_db = tag_db.unwrap(); let tag_db = tag_db.unwrap();
let schedules = get_schedules_by_tag(&tag_db); let schedules = get_schedules_by_tag(&tag_db);
let return_schedules: Vec<ReturnSchedule> = let return_schedules: Vec<ReturnSchedule> =
schedules.iter().map(ReturnSchedule::from).collect(); schedules.iter().map(ReturnSchedule::from).collect();
HttpResponse::Ok().json(return_schedules) HttpResponse::Ok().json(return_schedules)
} }
#[get("/api/v1/schedules/{schedule_id}")] #[get("/api/v1/schedules/{schedule_id}")]
pub async fn show(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Responder { pub async fn show(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Responder {
let emgauwa_uid = EmgauwaUid::try_from(schedule_uid.as_str()).or(Err(HandlerError::BadUid)); let emgauwa_uid = EmgauwaUid::try_from(schedule_uid.as_str()).or(Err(HandlerError::BadUid));
match emgauwa_uid { match emgauwa_uid {
Ok(uid) => { Ok(uid) => {
let schedule = get_schedule_by_uid(uid); let schedule = get_schedule_by_uid(uid);
match schedule { match schedule {
Ok(ok) => HttpResponse::Ok().json(ReturnSchedule::from(ok)), Ok(ok) => HttpResponse::Ok().json(ReturnSchedule::from(ok)),
Err(err) => HttpResponse::from(err), Err(err) => HttpResponse::from(err),
} }
} }
Err(err) => HttpResponse::from(err), Err(err) => HttpResponse::from(err),
} }
} }
#[post("/api/v1/schedules")] #[post("/api/v1/schedules")]
pub async fn add(data: web::Json<RequestSchedule>) -> impl Responder { pub async fn add(data: web::Json<RequestSchedule>) -> impl Responder {
let new_schedule = create_schedule(&data.name, &data.periods); let new_schedule = create_schedule(&data.name, &data.periods);
if new_schedule.is_err() { if new_schedule.is_err() {
return HttpResponse::from(new_schedule.unwrap_err()); return HttpResponse::from(new_schedule.unwrap_err());
} }
let new_schedule = new_schedule.unwrap(); let new_schedule = new_schedule.unwrap();
let result = set_schedule_tags(&new_schedule, data.tags.as_slice()); let result = set_schedule_tags(&new_schedule, data.tags.as_slice());
if result.is_err() { if result.is_err() {
return HttpResponse::from(result.unwrap_err()); return HttpResponse::from(result.unwrap_err());
} }
HttpResponse::Created().json(ReturnSchedule::from(new_schedule)) HttpResponse::Created().json(ReturnSchedule::from(new_schedule))
} }
#[post("/api/v1/schedules/list")] #[post("/api/v1/schedules/list")]
pub async fn add_list(data: web::Json<Vec<RequestSchedule>>) -> impl Responder { pub async fn add_list(data: web::Json<Vec<RequestSchedule>>) -> impl Responder {
let result: Vec<Result<Schedule, DatabaseError>> = data let result: Vec<Result<Schedule, DatabaseError>> = data
.as_slice() .as_slice()
.iter() .iter()
.map(|request_schedule| { .map(|request_schedule| {
let new_schedule = create_schedule(&request_schedule.name, &request_schedule.periods)?; let new_schedule = create_schedule(&request_schedule.name, &request_schedule.periods)?;
set_schedule_tags(&new_schedule, request_schedule.tags.as_slice())?; set_schedule_tags(&new_schedule, request_schedule.tags.as_slice())?;
Ok(new_schedule) Ok(new_schedule)
}) })
.collect(); .collect();
match vec_has_error(&result) { match vec_has_error(&result) {
true => HttpResponse::from( true => HttpResponse::from(
result result
.into_iter() .into_iter()
.find(|r| r.is_err()) .find(|r| r.is_err())
.unwrap() .unwrap()
.unwrap_err(), .unwrap_err(),
), ),
false => { false => {
let return_schedules: Vec<ReturnSchedule> = result let return_schedules: Vec<ReturnSchedule> = result
.iter() .iter()
.map(|s| ReturnSchedule::from(s.as_ref().unwrap())) .map(|s| ReturnSchedule::from(s.as_ref().unwrap()))
.collect(); .collect();
HttpResponse::Created().json(return_schedules) HttpResponse::Created().json(return_schedules)
} }
} }
} }
#[put("/api/v1/schedules/{schedule_id}")] #[put("/api/v1/schedules/{schedule_id}")]
pub async fn update( pub async fn update(
web::Path((schedule_uid,)): web::Path<(String,)>, web::Path((schedule_uid,)): web::Path<(String,)>,
data: web::Json<RequestSchedule>, data: web::Json<RequestSchedule>,
) -> impl Responder { ) -> impl Responder {
let emgauwa_uid = EmgauwaUid::try_from(schedule_uid.as_str()).or(Err(HandlerError::BadUid)); let emgauwa_uid = EmgauwaUid::try_from(schedule_uid.as_str()).or(Err(HandlerError::BadUid));
if emgauwa_uid.is_err() { if emgauwa_uid.is_err() {
return HttpResponse::from(emgauwa_uid.unwrap_err()); return HttpResponse::from(emgauwa_uid.unwrap_err());
} }
let emgauwa_uid = emgauwa_uid.unwrap(); let emgauwa_uid = emgauwa_uid.unwrap();
let schedule = get_schedule_by_uid(emgauwa_uid); let schedule = get_schedule_by_uid(emgauwa_uid);
if schedule.is_err() { if schedule.is_err() {
return HttpResponse::from(schedule.unwrap_err()); return HttpResponse::from(schedule.unwrap_err());
} }
let schedule = schedule.unwrap(); let schedule = schedule.unwrap();
let schedule = update_schedule(&schedule, data.name.as_str(), data.periods.borrow()); let schedule = update_schedule(&schedule, data.name.as_str(), data.periods.borrow());
if schedule.is_err() { if schedule.is_err() {
return HttpResponse::from(schedule.unwrap_err()); return HttpResponse::from(schedule.unwrap_err());
} }
let schedule = schedule.unwrap(); let schedule = schedule.unwrap();
let result = set_schedule_tags(&schedule, data.tags.as_slice()); let result = set_schedule_tags(&schedule, data.tags.as_slice());
if result.is_err() { if result.is_err() {
return HttpResponse::from(result.unwrap_err()); return HttpResponse::from(result.unwrap_err());
} }
HttpResponse::Ok().json(ReturnSchedule::from(schedule)) HttpResponse::Ok().json(ReturnSchedule::from(schedule))
} }
#[delete("/api/v1/schedules/{schedule_id}")] #[delete("/api/v1/schedules/{schedule_id}")]
pub async fn delete(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Responder { pub async fn delete(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Responder {
let emgauwa_uid = EmgauwaUid::try_from(schedule_uid.as_str()).or(Err(HandlerError::BadUid)); let emgauwa_uid = EmgauwaUid::try_from(schedule_uid.as_str()).or(Err(HandlerError::BadUid));
match emgauwa_uid { match emgauwa_uid {
Ok(uid) => match uid { Ok(uid) => match uid {
EmgauwaUid::Off => HttpResponse::from(HandlerError::ProtectedSchedule), EmgauwaUid::Off => HttpResponse::from(HandlerError::ProtectedSchedule),
EmgauwaUid::On => HttpResponse::from(HandlerError::ProtectedSchedule), EmgauwaUid::On => HttpResponse::from(HandlerError::ProtectedSchedule),
EmgauwaUid::Any(_) => match delete_schedule_by_uid(uid) { EmgauwaUid::Any(_) => match delete_schedule_by_uid(uid) {
Ok(_) => HttpResponse::Ok().json("schedule got deleted"), Ok(_) => HttpResponse::Ok().json("schedule got deleted"),
Err(err) => HttpResponse::from(err), Err(err) => HttpResponse::from(err),
}, },
}, },
Err(err) => HttpResponse::from(err), Err(err) => HttpResponse::from(err),
} }
} }

View file

@ -18,36 +18,36 @@ mod utils;
#[actix_web::main]
async fn main() -> std::io::Result<()> {
	// Apply any pending embedded database migrations before serving requests.
	db::run_migrations();
	// Initialize env_logger; log level defaults to "info" unless overridden
	// via the environment (e.g. RUST_LOG).
	Builder::from_env(Env::default().default_filter_or("info")).init();
	// NOTE(review): requires the wiringPi library at runtime — only linked on
	// arm/aarch64 per build.rs; confirm behavior on other targets.
	let pi = wiringpi::setup();
	// Use WiringPi pin 0 as output and drive it high at startup.
	let pin = pi.output_pin(0);
	pin.digital_write(High);
	HttpServer::new(|| {
		App::new()
			// Fully permissive CORS headers on every response.
			.wrap(
				middleware::DefaultHeaders::new()
					.header("Access-Control-Allow-Origin", "*")
					.header("Access-Control-Allow-Headers", "*")
					.header("Access-Control-Allow-Methods", "*"),
			)
			.wrap(middleware::Logger::default())
			// Treat "/path/" and "/path" as the same route.
			.wrap(middleware::NormalizePath::new(TrailingSlash::Trim))
			// Turn malformed JSON bodies into structured error responses.
			.app_data(web::JsonConfig::default().error_handler(handlers::json_error_handler))
			.service(handlers::v1::schedules::index)
			.service(handlers::v1::schedules::tagged)
			.service(handlers::v1::schedules::show)
			.service(handlers::v1::schedules::add)
			.service(handlers::v1::schedules::add_list)
			.service(handlers::v1::schedules::update)
			.service(handlers::v1::schedules::delete)
	})
	.bind("127.0.0.1:5000")?
	.run()
	.await
}

View file

@ -5,20 +5,20 @@ use crate::db::schedules::get_schedule_tags;
/// API response shape for a schedule: the schedule row itself (flattened
/// into the top-level JSON object) plus its associated tag names.
#[derive(Debug, Serialize)]
pub struct ReturnSchedule {
	#[serde(flatten)]
	pub schedule: Schedule,
	// Tag names resolved via get_schedule_tags when converting from Schedule.
	pub tags: Vec<String>,
}
impl From<Schedule> for ReturnSchedule { impl From<Schedule> for ReturnSchedule {
fn from(schedule: Schedule) -> Self { fn from(schedule: Schedule) -> Self {
let tags: Vec<String> = get_schedule_tags(&schedule); let tags: Vec<String> = get_schedule_tags(&schedule);
ReturnSchedule { schedule, tags } ReturnSchedule { schedule, tags }
} }
} }
impl From<&Schedule> for ReturnSchedule {
	/// Convenience conversion for borrowed schedules: clone and delegate
	/// to the owned-value conversion.
	fn from(schedule: &Schedule) -> Self {
		schedule.clone().into()
	}
}

View file

@ -15,110 +15,110 @@ use uuid::Uuid;
/// Identifier for a schedule, persisted as a BLOB column in SQLite.
/// "off" and "on" are reserved, well-known schedules; every other
/// schedule is addressed by a UUID.
#[derive(AsExpression, FromSqlRow, PartialEq, Clone)]
#[sql_type = "Binary"]
pub enum EmgauwaUid {
	Off,
	On,
	Any(Uuid),
}
impl EmgauwaUid {
	// String forms used in URLs and serialized output.
	const OFF_STR: &'static str = "off";
	const ON_STR: &'static str = "on";
	// Single-byte database encodings for the two reserved schedules.
	const OFF_U8: u8 = 0;
	const ON_U8: u8 = 1;
	// u128 UUID values reserved for the off/on schedules.
	const OFF_U128: u128 = 0;
	const ON_U128: u128 = 1;
}
impl Default for EmgauwaUid { impl Default for EmgauwaUid {
fn default() -> Self { fn default() -> Self {
EmgauwaUid::Any(Uuid::new_v4()) EmgauwaUid::Any(Uuid::new_v4())
} }
} }
impl Debug for EmgauwaUid {
	fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
		// Delegate formatting to the underlying representation: the reserved
		// string for Off/On, the UUID for Any. Note: `.fmt(f)` resolves to
		// whichever formatting trait is in scope here — keep as-is.
		match self {
			EmgauwaUid::Off => EmgauwaUid::OFF_STR.fmt(f),
			EmgauwaUid::On => EmgauwaUid::ON_STR.fmt(f),
			EmgauwaUid::Any(value) => value.fmt(f),
		}
	}
}
impl ToSql<Binary, Sqlite> for EmgauwaUid { impl ToSql<Binary, Sqlite> for EmgauwaUid {
fn to_sql<W: Write>(&self, out: &mut Output<W, Sqlite>) -> serialize::Result { fn to_sql<W: Write>(&self, out: &mut Output<W, Sqlite>) -> serialize::Result {
match self { match self {
EmgauwaUid::Off => out.write_all(&[EmgauwaUid::OFF_U8])?, EmgauwaUid::Off => out.write_all(&[EmgauwaUid::OFF_U8])?,
EmgauwaUid::On => out.write_all(&[EmgauwaUid::ON_U8])?, EmgauwaUid::On => out.write_all(&[EmgauwaUid::ON_U8])?,
EmgauwaUid::Any(value) => out.write_all(value.as_bytes())?, EmgauwaUid::Any(value) => out.write_all(value.as_bytes())?,
} }
Ok(IsNull::No) Ok(IsNull::No)
} }
} }
impl FromSql<Binary, Sqlite> for EmgauwaUid { impl FromSql<Binary, Sqlite> for EmgauwaUid {
fn from_sql(bytes: Option<&<Sqlite as Backend>::RawValue>) -> deserialize::Result<Self> { fn from_sql(bytes: Option<&<Sqlite as Backend>::RawValue>) -> deserialize::Result<Self> {
match bytes { match bytes {
None => Ok(EmgauwaUid::default()), None => Ok(EmgauwaUid::default()),
Some(value) => match value.read_blob() { Some(value) => match value.read_blob() {
[EmgauwaUid::OFF_U8] => Ok(EmgauwaUid::Off), [EmgauwaUid::OFF_U8] => Ok(EmgauwaUid::Off),
[EmgauwaUid::ON_U8] => Ok(EmgauwaUid::On), [EmgauwaUid::ON_U8] => Ok(EmgauwaUid::On),
value_bytes => Ok(EmgauwaUid::Any(Uuid::from_slice(value_bytes).unwrap())), value_bytes => Ok(EmgauwaUid::Any(Uuid::from_slice(value_bytes).unwrap())),
}, },
} }
} }
} }
impl Serialize for EmgauwaUid { impl Serialize for EmgauwaUid {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where where
S: Serializer, S: Serializer,
{ {
String::from(self).serialize(serializer) String::from(self).serialize(serializer)
} }
} }
impl From<Uuid> for EmgauwaUid { impl From<Uuid> for EmgauwaUid {
fn from(uid: Uuid) -> EmgauwaUid { fn from(uid: Uuid) -> EmgauwaUid {
match uid.as_u128() { match uid.as_u128() {
EmgauwaUid::OFF_U128 => EmgauwaUid::Off, EmgauwaUid::OFF_U128 => EmgauwaUid::Off,
EmgauwaUid::ON_U128 => EmgauwaUid::On, EmgauwaUid::ON_U128 => EmgauwaUid::On,
_ => EmgauwaUid::Any(uid), _ => EmgauwaUid::Any(uid),
} }
} }
} }
impl TryFrom<&str> for EmgauwaUid { impl TryFrom<&str> for EmgauwaUid {
type Error = uuid::Error; type Error = uuid::Error;
fn try_from(value: &str) -> Result<Self, Self::Error> { fn try_from(value: &str) -> Result<Self, Self::Error> {
match value { match value {
EmgauwaUid::OFF_STR => Ok(EmgauwaUid::Off), EmgauwaUid::OFF_STR => Ok(EmgauwaUid::Off),
EmgauwaUid::ON_STR => Ok(EmgauwaUid::On), EmgauwaUid::ON_STR => Ok(EmgauwaUid::On),
any => match Uuid::from_str(any) { any => match Uuid::from_str(any) {
Ok(uuid) => Ok(EmgauwaUid::Any(uuid)), Ok(uuid) => Ok(EmgauwaUid::Any(uuid)),
Err(err) => Err(err), Err(err) => Err(err),
}, },
} }
} }
} }
impl From<&EmgauwaUid> for Uuid { impl From<&EmgauwaUid> for Uuid {
fn from(emgauwa_uid: &EmgauwaUid) -> Uuid { fn from(emgauwa_uid: &EmgauwaUid) -> Uuid {
match emgauwa_uid { match emgauwa_uid {
EmgauwaUid::Off => uuid::Uuid::from_u128(EmgauwaUid::OFF_U128), EmgauwaUid::Off => uuid::Uuid::from_u128(EmgauwaUid::OFF_U128),
EmgauwaUid::On => uuid::Uuid::from_u128(EmgauwaUid::ON_U128), EmgauwaUid::On => uuid::Uuid::from_u128(EmgauwaUid::ON_U128),
EmgauwaUid::Any(value) => *value, EmgauwaUid::Any(value) => *value,
} }
} }
} }
impl From<&EmgauwaUid> for String { impl From<&EmgauwaUid> for String {
fn from(emgauwa_uid: &EmgauwaUid) -> String { fn from(emgauwa_uid: &EmgauwaUid) -> String {
match emgauwa_uid { match emgauwa_uid {
EmgauwaUid::Off => String::from(EmgauwaUid::OFF_STR), EmgauwaUid::Off => String::from(EmgauwaUid::OFF_STR),
EmgauwaUid::On => String::from(EmgauwaUid::ON_STR), EmgauwaUid::On => String::from(EmgauwaUid::ON_STR),
EmgauwaUid::Any(value) => value.to_hyphenated().to_string(), EmgauwaUid::Any(value) => value.to_hyphenated().to_string(),
} }
} }
} }

View file

@ -1,3 +1,3 @@
/// Returns true if any element of `target` is an `Err`.
pub fn vec_has_error<T, E>(target: &[Result<T, E>]) -> bool {
	for item in target {
		if item.is_err() {
			return true;
		}
	}
	false
}