Add add_list function and prefer .iter()

Tobias Reisinger 2022-07-17 20:19:36 +02:00
parent 53c6fcd917
commit e3adc35221
12 changed files with 117 additions and 67 deletions
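
Note (not part of the commit itself): the `.iter()` preference works together with the new `impl From<&Schedule> for ReturnSchedule` added below, so handlers can map over borrowed schedules instead of consuming the `Vec<Schedule>` with `.into_iter()`. A minimal, self-contained sketch of that pattern with simplified stand-in types (not the real Emgauwa models):

    #[derive(Clone)]
    struct Schedule {
        id: i32,
        name: String,
    }

    struct ReturnSchedule {
        schedule: Schedule,
        tags: Vec<String>,
    }

    impl From<Schedule> for ReturnSchedule {
        fn from(schedule: Schedule) -> Self {
            // In the real code the tags come from get_schedule_tags(&schedule).
            ReturnSchedule { schedule, tags: Vec::new() }
        }
    }

    // The borrowing impl added in this commit: clone and reuse the owned impl.
    impl From<&Schedule> for ReturnSchedule {
        fn from(schedule: &Schedule) -> Self {
            ReturnSchedule::from(schedule.clone())
        }
    }

    fn main() {
        let schedules = vec![Schedule { id: 1, name: "default".to_string() }];
        // .iter() yields &Schedule, which From<&Schedule> accepts,
        // and `schedules` stays usable afterwards.
        let returned: Vec<ReturnSchedule> =
            schedules.iter().map(ReturnSchedule::from).collect();
        println!("{} schedules -> {} return models", schedules.len(), returned.len());
    }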

View file

@@ -6,8 +6,8 @@ use dotenv::dotenv;
pub mod errors;
pub mod models;
pub mod schema;
pub mod schedules;
pub mod schema;
pub mod tag;
mod model_utils;
@@ -25,4 +25,4 @@ fn get_connection() -> SqliteConnection {
pub fn run_migrations() {
let connection = get_connection();
embedded_migrations::run(&connection).expect("Failed to run migrations.");
}
}

View file

@@ -1,5 +1,5 @@
use actix_web::HttpResponse;
use actix_web::http::StatusCode;
use actix_web::HttpResponse;
use serde::ser::SerializeStruct;
use serde::{Serialize, Serializer};
@@ -18,7 +18,7 @@ impl DatabaseError {
match self {
DatabaseError::NotFound => StatusCode::NOT_FOUND,
DatabaseError::Protected => StatusCode::FORBIDDEN,
_ => StatusCode::INTERNAL_SERVER_ERROR
_ => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}

View file

@@ -1,3 +1,4 @@
use crate::db::models::Periods;
use chrono::{NaiveTime, Timelike};
use diesel::backend::Backend;
use diesel::deserialize::FromSql;
@@ -7,7 +8,6 @@ use diesel::sqlite::Sqlite;
use diesel::{deserialize, serialize};
use serde::{Deserialize, Serialize};
use std::io::Write;
use crate::db::models::Periods;
#[derive(Debug, Serialize, Deserialize, AsExpression, FromSqlRow, PartialEq, Clone)]
#[sql_type = "Binary"]
@@ -57,7 +57,7 @@ impl ToSql<Binary, Sqlite> for Periods {
impl FromSql<Binary, Sqlite> for Periods {
fn from_sql(bytes: Option<&<Sqlite as Backend>::RawValue>) -> deserialize::Result<Self> {
let blob = bytes.unwrap().read_blob();
let blob = bytes.unwrap().read_blob();
let mut vec = Vec::new();
for i in (3..blob.len()).step_by(4) {

View file

@@ -1,6 +1,6 @@
use crate::db::model_utils::Period;
use diesel::sql_types::Binary;
use serde::{Deserialize, Serialize};
use crate::db::model_utils::Period;
use super::schema::*;
use crate::types::EmgauwaUid;
@@ -12,7 +12,7 @@ pub struct Relay {
// TODO
}
#[derive(Debug, Serialize, Identifiable, Queryable)]
#[derive(Debug, Serialize, Identifiable, Queryable, Clone)]
pub struct Schedule {
#[serde(skip)]
pub id: i32,

View file

@@ -1,16 +1,16 @@
use std::borrow::Borrow;
use diesel::dsl::sql;
use diesel::prelude::*;
use std::borrow::Borrow;
use crate::types::EmgauwaUid;
use crate::db::errors::DatabaseError;
use crate::db::{get_connection, schema};
use crate::db::models::*;
use crate::db::schema::tags::dsl::tags;
use crate::db::schema::junction_tag::dsl::junction_tag;
use crate::db::schema::schedules::dsl::schedules;
use crate::db::schema::tags::dsl::tags;
use crate::db::tag::{create_junction_tag, create_tag};
use crate::db::{get_connection, schema};
pub fn get_schedule_tags(schedule: &Schedule) -> Vec<String> {
let connection = get_connection();
@@ -51,11 +51,13 @@ pub fn delete_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<(), DatabaseErro
let filter_uid = match filter_uid {
EmgauwaUid::Off => Err(DatabaseError::Protected),
EmgauwaUid::On => Err(DatabaseError::Protected),
EmgauwaUid::Any(_) => Ok(filter_uid)
EmgauwaUid::Any(_) => Ok(filter_uid),
}?;
let connection = get_connection();
match diesel::delete(schedules.filter(schema::schedules::uid.eq(filter_uid))).execute(&connection) {
match diesel::delete(schedules.filter(schema::schedules::uid.eq(filter_uid)))
.execute(&connection)
{
Ok(rows) => {
if rows != 0 {
Ok(())
@@ -89,7 +91,11 @@ pub fn create_schedule(new_name: &str, new_periods: &Periods) -> Result<Schedule
Ok(result)
}
pub fn update_schedule(schedule: &Schedule, new_name: &str, new_periods: &Periods) -> Result<Schedule, DatabaseError> {
pub fn update_schedule(
schedule: &Schedule,
new_name: &str,
new_periods: &Periods,
) -> Result<Schedule, DatabaseError> {
let connection = get_connection();
let new_periods = match schedule.uid {
@@ -114,18 +120,20 @@ pub fn set_schedule_tags(schedule: &Schedule, new_tags: &[String]) -> Result<(),
.execute(&connection)
.or(Err(DatabaseError::DeleteError))?;
let mut database_tags: Vec<Tag> = tags.filter(schema::tags::tag.eq_any(new_tags))
let mut database_tags: Vec<Tag> = tags
.filter(schema::tags::tag.eq_any(new_tags))
.load::<Tag>(&connection)
.expect("Error loading tags");
let mut database_tags_iter = database_tags.clone().into_iter().map(|tag_db| tag_db.tag);
let database_tags_str: Vec<String> = database_tags
.iter()
.map(|tag_db| tag_db.tag.clone())
.collect();
// create missing tags
for new_tag in new_tags {
if !database_tags_iter.any(|t| t.eq(new_tag)) {
database_tags.push(
create_tag(new_tag).expect("Error inserting tag")
);
if !database_tags_str.contains(new_tag) {
database_tags.push(create_tag(new_tag).expect("Error inserting tag"));
}
}

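A side note on the set_schedule_tags change above (my reading of the diff, not stated in the commit): the removed code shared one `database_tags_iter` across all loop iterations, and each `.any()` call advances that iterator, so a tag looked up after an earlier search could be reported as missing even though it exists, causing duplicate inserts. Collecting the names into a `Vec<String>` and calling `.contains()` re-checks the full list every time. A standalone sketch of the pitfall with made-up tag data:

    fn main() {
        let existing = vec!["alpha".to_string(), "beta".to_string(), "gamma".to_string()];
        let incoming = ["gamma", "alpha"];

        // Old pattern: one shared iterator. The search for "gamma" consumes
        // "alpha" and "beta", so the later lookup for "alpha" misses.
        let mut shared_iter = existing.clone().into_iter();
        let old: Vec<bool> = incoming.iter().map(|t| shared_iter.any(|e| e == *t)).collect();

        // New pattern: a Vec of names, scanned in full on every check.
        let names: Vec<String> = existing.iter().cloned().collect();
        let new: Vec<bool> = incoming.iter().map(|t| names.contains(&t.to_string())).collect();

        println!("old: {:?}", old); // [true, false] -> "alpha" wrongly treated as new
        println!("new: {:?}", new); // [true, true]
    }
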
View file

@@ -1,20 +1,16 @@
use diesel::dsl::sql;
use diesel::prelude::*;
use crate::db::errors::DatabaseError;
use crate::db::{get_connection, schema};
use crate::db::models::*;
use crate::db::schema::tags::dsl::tags;
use crate::db::schema::junction_tag::dsl::junction_tag;
use crate::db::schema::tags::dsl::tags;
use crate::db::{get_connection, schema};
pub fn create_tag(new_tag: &str) -> Result<Tag, DatabaseError> {
let connection = get_connection();
let new_tag = NewTag {
tag: new_tag,
};
let new_tag = NewTag { tag: new_tag };
diesel::insert_into(tags)
.values(&new_tag)
@@ -40,13 +36,17 @@ pub fn get_tag(target_tag: &str) -> Result<Tag, DatabaseError> {
Ok(result)
}
pub fn create_junction_tag(target_tag: Tag, target_relay: Option<&Relay>, target_schedule: Option<&Schedule>) -> Result<JunctionTag, DatabaseError> {
pub fn create_junction_tag(
target_tag: Tag,
target_relay: Option<&Relay>,
target_schedule: Option<&Schedule>,
) -> Result<JunctionTag, DatabaseError> {
let connection = get_connection();
let new_junction_tag = NewJunctionTag {
relay_id: target_relay.map(|r| r.id),
schedule_id: target_schedule.map(|s| s.id),
tag_id: target_tag.id
tag_id: target_tag.id,
};
diesel::insert_into(junction_tag)
@@ -60,4 +60,4 @@ pub fn create_junction_tag(target_tag: Tag, target_relay: Option<&Relay>, target
.or(Err(DatabaseError::InsertGetError))?;
Ok(result)
}
}

View file

@@ -6,7 +6,7 @@ use serde::{Serialize, Serializer};
#[derive(Debug)]
pub enum HandlerError {
BadUid,
ProtectedSchedule
ProtectedSchedule,
}
impl HandlerError {
@@ -16,13 +16,12 @@ impl HandlerError {
HandlerError::ProtectedSchedule => StatusCode::FORBIDDEN,
}
}
}
impl Serialize for HandlerError {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
where
S: Serializer,
{
let mut s = serializer.serialize_struct("error", 2)?;
s.serialize_field("code", &self.get_code().as_u16())?;

View file

@@ -21,7 +21,7 @@ impl Serialize for EmgauwaJsonPayLoadError {
"description",
&match self {
EmgauwaJsonPayLoadError::Error(err) => format!("{}", err),
}
},
)?;
s.end()
}

View file

@@ -1,14 +1,16 @@
use crate::db::errors::DatabaseError;
use actix_web::{delete, get, post, put, web, HttpResponse, Responder};
use serde::{Deserialize, Serialize};
use std::borrow::Borrow;
use std::convert::TryFrom;
use actix_web::{HttpResponse, Responder, web, get, post, put, delete};
use serde::{Serialize, Deserialize};
use crate::db::models::Periods;
use crate::db::models::{Periods, Schedule};
use crate::db::schedules::*;
use crate::db::tag::get_tag;
use crate::handlers::errors::HandlerError;
use crate::return_models::ReturnSchedule;
use crate::types::EmgauwaUid;
use crate::utils::vec_has_error;
#[derive(Debug, Serialize, Deserialize)]
pub struct RequestSchedule {
@@ -20,27 +22,27 @@ pub struct RequestSchedule {
#[get("/api/v1/schedules")]
pub async fn index() -> impl Responder {
let schedules = get_schedules();
let return_schedules: Vec<ReturnSchedule> = schedules.into_iter().map(ReturnSchedule::from).collect();
let return_schedules: Vec<ReturnSchedule> =
schedules.iter().map(ReturnSchedule::from).collect();
HttpResponse::Ok().json(return_schedules)
}
#[get("/api/v1/schedules/tag/{tag}")]
pub async fn tagged(web::Path((tag,)): web::Path<(String,)>) -> impl Responder {
let tag_db = get_tag(&tag);
if tag_db.is_err() {
return HttpResponse::from(tag_db.unwrap_err())
return HttpResponse::from(tag_db.unwrap_err());
}
let tag_db = tag_db.unwrap();
let schedules = get_schedules_by_tag(&tag_db);
let return_schedules: Vec<ReturnSchedule> = schedules.into_iter().map(ReturnSchedule::from).collect();
let return_schedules: Vec<ReturnSchedule> =
schedules.iter().map(ReturnSchedule::from).collect();
HttpResponse::Ok().json(return_schedules)
}
#[get("/api/v1/schedules/{schedule_id}")]
pub async fn show(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Responder {
let emgauwa_uid = EmgauwaUid::try_from(schedule_uid.as_str()).or(Err(HandlerError::BadUid));
match emgauwa_uid {
@@ -50,8 +52,8 @@ pub async fn show(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Resp
Ok(ok) => HttpResponse::Ok().json(ReturnSchedule::from(ok)),
Err(err) => HttpResponse::from(err),
}
},
Err(err) => HttpResponse::from(err)
}
Err(err) => HttpResponse::from(err),
}
}
@@ -60,7 +62,7 @@ pub async fn add(data: web::Json<RequestSchedule>) -> impl Responder {
let new_schedule = create_schedule(&data.name, &data.periods);
if new_schedule.is_err() {
return HttpResponse::from(new_schedule.unwrap_err())
return HttpResponse::from(new_schedule.unwrap_err());
}
let new_schedule = new_schedule.unwrap();
@@ -72,9 +74,43 @@ pub async fn add(data: web::Json<RequestSchedule>) -> impl Responder {
HttpResponse::Created().json(ReturnSchedule::from(new_schedule))
}
#[put("/api/v1/schedules/{schedule_id}")]
pub async fn update(web::Path((schedule_uid,)): web::Path<(String,)>, data: web::Json<RequestSchedule>) -> impl Responder {
#[post("/api/v1/schedules/list")]
pub async fn add_list(data: web::Json<Vec<RequestSchedule>>) -> impl Responder {
let result: Vec<Result<Schedule, DatabaseError>> = data
.as_slice()
.iter()
.map(|request_schedule| {
let new_schedule = create_schedule(&request_schedule.name, &request_schedule.periods)?;
set_schedule_tags(&new_schedule, request_schedule.tags.as_slice())?;
Ok(new_schedule)
})
.collect();
match vec_has_error(&result) {
true => HttpResponse::from(
result
.into_iter()
.find(|r| r.is_err())
.unwrap()
.unwrap_err(),
),
false => {
let return_schedules: Vec<ReturnSchedule> = result
.iter()
.map(|s| ReturnSchedule::from(s.as_ref().unwrap()))
.collect();
HttpResponse::Created().json(return_schedules)
}
}
}
#[put("/api/v1/schedules/{schedule_id}")]
pub async fn update(
web::Path((schedule_uid,)): web::Path<(String,)>,
data: web::Json<RequestSchedule>,
) -> impl Responder {
let emgauwa_uid = EmgauwaUid::try_from(schedule_uid.as_str()).or(Err(HandlerError::BadUid));
if emgauwa_uid.is_err() {
return HttpResponse::from(emgauwa_uid.unwrap_err());
@@ -109,13 +145,11 @@ pub async fn delete(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Re
Ok(uid) => match uid {
EmgauwaUid::Off => HttpResponse::from(HandlerError::ProtectedSchedule),
EmgauwaUid::On => HttpResponse::from(HandlerError::ProtectedSchedule),
EmgauwaUid::Any(_) => {
match delete_schedule_by_uid(uid) {
Ok(_) => HttpResponse::Ok().json("schedule got deleted"),
Err(err) => HttpResponse::from(err)
}
}
EmgauwaUid::Any(_) => match delete_schedule_by_uid(uid) {
Ok(_) => HttpResponse::Ok().json("schedule got deleted"),
Err(err) => HttpResponse::from(err),
},
},
Err(err) => HttpResponse::from(err)
Err(err) => HttpResponse::from(err),
}
}

View file

@@ -2,11 +2,11 @@
extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
extern crate dotenv;
extern crate core;
extern crate dotenv;
use actix_web::{middleware, web, App, HttpServer};
use actix_web::middleware::normalize::TrailingSlash;
use actix_web::{middleware, web, App, HttpServer};
use env_logger::{Builder, Env};
use wiringpi::pin::Value::High;
@@ -14,6 +14,7 @@ mod db;
mod handlers;
mod return_models;
mod types;
mod utils;
#[actix_web::main]
async fn main() -> std::io::Result<()> {
@@ -29,10 +30,11 @@ async fn main() -> std::io::Result<()> {
HttpServer::new(|| {
App::new()
.wrap(middleware::DefaultHeaders::new()
.header("Access-Control-Allow-Origin", "*")
.header("Access-Control-Allow-Headers", "*")
.header("Access-Control-Allow-Methods", "*")
.wrap(
middleware::DefaultHeaders::new()
.header("Access-Control-Allow-Origin", "*")
.header("Access-Control-Allow-Headers", "*")
.header("Access-Control-Allow-Methods", "*"),
)
.wrap(middleware::Logger::default())
.wrap(middleware::NormalizePath::new(TrailingSlash::Trim))
@@ -41,6 +43,7 @@ async fn main() -> std::io::Result<()> {
.service(handlers::v1::schedules::tagged)
.service(handlers::v1::schedules::show)
.service(handlers::v1::schedules::add)
.service(handlers::v1::schedules::add_list)
.service(handlers::v1::schedules::update)
.service(handlers::v1::schedules::delete)
})

View file

@@ -1,4 +1,4 @@
use serde::{Serialize};
use serde::Serialize;
use crate::db::models::Schedule;
use crate::db::schedules::get_schedule_tags;
@@ -13,9 +13,12 @@ pub struct ReturnSchedule {
impl From<Schedule> for ReturnSchedule {
fn from(schedule: Schedule) -> Self {
let tags: Vec<String> = get_schedule_tags(&schedule);
ReturnSchedule {
schedule,
tags,
}
ReturnSchedule { schedule, tags }
}
}
}
impl From<&Schedule> for ReturnSchedule {
fn from(schedule: &Schedule) -> Self {
ReturnSchedule::from(schedule.clone())
}
}

src/utils.rs Normal file
View file

@@ -0,0 +1,3 @@
pub fn vec_has_error<T, E>(target: &[Result<T, E>]) -> bool {
target.iter().any(|t| t.is_err())
}
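
The helper mirrors how add_list above consumes it: collect every creation attempt into a `Vec<Result<_, _>>`, then either surface the first error or unwrap all successes. A standalone sketch of that flow, using a placeholder error type instead of DatabaseError (the function is repeated so the sketch compiles on its own):

    pub fn vec_has_error<T, E>(target: &[Result<T, E>]) -> bool {
        target.iter().any(|t| t.is_err())
    }

    fn main() {
        let results: Vec<Result<i32, String>> = vec![Ok(1), Err("boom".to_string()), Ok(3)];

        if vec_has_error(&results) {
            // Same shape as add_list: report the first error found.
            let first_err = results.into_iter().find(|r| r.is_err()).unwrap().unwrap_err();
            println!("batch failed: {}", first_err);
        } else {
            let values: Vec<i32> = results.into_iter().map(|r| r.unwrap()).collect();
            println!("batch ok: {:?}", values);
        }
    }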