Start Rust rewrite
This commit is contained in:
commit
12d57d020f
22 changed files with 2599 additions and 0 deletions
62
src/db.rs
Normal file
62
src/db.rs
Normal file
|
@ -0,0 +1,62 @@
|
|||
pub mod errors;
|
||||
pub mod models;
|
||||
pub mod schema;
|
||||
mod types;
|
||||
|
||||
use diesel::prelude::*;
|
||||
|
||||
use diesel::dsl::sql;
|
||||
use dotenv::dotenv;
|
||||
use std::env;
|
||||
|
||||
use models::*;
|
||||
use schema::schedules::dsl::*;
|
||||
|
||||
use diesel_migrations::embed_migrations;
|
||||
use errors::DatabaseError;
|
||||
use types::EmgauwaUid;
|
||||
|
||||
embed_migrations!("migrations");
|
||||
|
||||
fn get_connection() -> SqliteConnection {
|
||||
dotenv().ok();
|
||||
|
||||
let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
|
||||
SqliteConnection::establish(&database_url)
|
||||
.unwrap_or_else(|_| panic!("Error connecting to {}", database_url))
|
||||
}
|
||||
|
||||
pub fn run_migrations() {
|
||||
let connection = get_connection();
|
||||
embedded_migrations::run(&connection).expect("Failed to run migrations.");
|
||||
}
|
||||
|
||||
pub fn get_schedules() -> Vec<Schedule> {
|
||||
let connection = get_connection();
|
||||
schedules
|
||||
.limit(5)
|
||||
.load::<Schedule>(&connection)
|
||||
.expect("Error loading schedules")
|
||||
}
|
||||
|
||||
pub fn create_schedule(new_name: &str) -> Result<Schedule, DatabaseError> {
|
||||
let connection = get_connection();
|
||||
|
||||
let new_schedule = NewSchedule {
|
||||
uid: &EmgauwaUid::default(),
|
||||
name: new_name,
|
||||
periods: "",
|
||||
};
|
||||
|
||||
diesel::insert_into(schedules)
|
||||
.values(&new_schedule)
|
||||
.execute(&connection)
|
||||
.or(Err(DatabaseError::InsertError))?;
|
||||
|
||||
let result = schedules
|
||||
.find(sql("last_insert_rowid()"))
|
||||
.get_result::<Schedule>(&connection)
|
||||
.or(Err(DatabaseError::InsertGetError))?;
|
||||
|
||||
Ok(result)
|
||||
}
|
30
src/db/errors.rs
Normal file
30
src/db/errors.rs
Normal file
|
@ -0,0 +1,30 @@
|
|||
use serde::ser::SerializeStruct;
|
||||
use serde::{Serialize, Serializer};
|
||||
|
||||
/// Errors that can occur while writing to the database.
///
/// `Debug` is derived so the error can be logged and inspected in tests;
/// the public error type previously had no way to be printed for diagnostics.
#[derive(Debug)]
pub enum DatabaseError {
    /// Writing a new row to the database failed.
    InsertError,
    /// The row was written, but reading it back failed.
    InsertGetError,
}
|
||||
|
||||
impl Serialize for DatabaseError {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let mut s = serializer.serialize_struct("error", 2)?;
|
||||
s.serialize_field("code", &500)?;
|
||||
s.serialize_field("description", &String::from(self))?;
|
||||
s.end()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&DatabaseError> for String {
|
||||
fn from(err: &DatabaseError) -> Self {
|
||||
match err {
|
||||
DatabaseError::InsertError => String::from("error inserting into database"),
|
||||
DatabaseError::InsertGetError => {
|
||||
String::from("error retrieving new entry from database (your entry was saved)")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
20
src/db/models.rs
Normal file
20
src/db/models.rs
Normal file
|
@ -0,0 +1,20 @@
|
|||
use super::types::EmgauwaUid;
|
||||
use serde::Serialize;
|
||||
|
||||
use super::schema::schedules;
|
||||
|
||||
/// A schedule row as loaded from the `schedules` table.
#[derive(Serialize, Queryable)]
pub struct Schedule {
    /// Auto-increment primary key.
    pub id: i32,
    /// Identifier stored as a BLOB column (see `db::types::EmgauwaUid`).
    pub uid: EmgauwaUid,
    pub name: String,
    /// NOTE(review): the period data format is not visible here — confirm
    /// how this string is encoded before relying on it.
    pub periods: String,
}
|
||||
|
||||
/// Insertable form of a schedule: all fields borrowed, and no `id`
/// (SQLite assigns the rowid on insert).
#[derive(Insertable)]
#[table_name = "schedules"]
pub struct NewSchedule<'a> {
    pub uid: &'a EmgauwaUid,
    pub name: &'a str,
    pub periods: &'a str,
}
|
93
src/db/schema.rs
Normal file
93
src/db/schema.rs
Normal file
|
@ -0,0 +1,93 @@
|
|||
// Diesel schema declarations (the kind normally produced by
// `diesel print-schema`). Each `table!` invocation describes a table's
// columns and SQL types for the query builder; `joinable!` declares the
// foreign-key relationships used for implicit joins; and
// `allow_tables_to_appear_in_same_query!` permits combining these tables
// within a single query.

// Relay controllers.
table! {
    controllers (id) {
        id -> Integer,
        uid -> Text,
        name -> Nullable<Text>,
        ip -> Nullable<Text>,
        port -> Nullable<Integer>,
        relay_count -> Nullable<Integer>,
        active -> Bool,
    }
}

// Link table between relays and schedules, carrying a weekday.
table! {
    junction_relay_schedule (id) {
        id -> Integer,
        weekday -> SmallInt,
        relay_id -> Nullable<Integer>,
        schedule_id -> Nullable<Integer>,
    }
}

// Attaches a tag to a relay and/or a schedule (both sides nullable).
table! {
    junction_tag (id) {
        id -> Integer,
        tag_id -> Integer,
        relay_id -> Nullable<Integer>,
        schedule_id -> Nullable<Integer>,
    }
}

// Actions belonging to a macro.
table! {
    macro_actions (id) {
        id -> Integer,
        macro_id -> Integer,
        relay_id -> Nullable<Integer>,
        schedule_id -> Nullable<Integer>,
        weekday -> SmallInt,
    }
}

table! {
    macros (id) {
        id -> Integer,
        uid -> Text,
        name -> Nullable<Text>,
    }
}

// A single relay output belonging to a controller.
table! {
    relays (id) {
        id -> Integer,
        name -> Nullable<Text>,
        number -> Integer,
        controller_id -> Integer,
    }
}

// Schedules; `uid` is a BLOB column (see `db::types::EmgauwaUid`).
table! {
    schedules (id) {
        id -> Integer,
        uid -> Binary,
        name -> Text,
        periods -> Text,
    }
}

table! {
    tags (id) {
        id -> Integer,
        tag -> Text,
    }
}

joinable!(junction_relay_schedule -> relays (relay_id));
joinable!(junction_relay_schedule -> schedules (schedule_id));
joinable!(junction_tag -> relays (relay_id));
joinable!(junction_tag -> schedules (schedule_id));
joinable!(junction_tag -> tags (tag_id));
joinable!(macro_actions -> macros (macro_id));
joinable!(macro_actions -> relays (relay_id));
joinable!(macro_actions -> schedules (schedule_id));
joinable!(relays -> controllers (controller_id));

allow_tables_to_appear_in_same_query!(
    controllers,
    junction_relay_schedule,
    junction_tag,
    macro_actions,
    macros,
    relays,
    schedules,
    tags,
);
|
97
src/db/types.rs
Normal file
97
src/db/types.rs
Normal file
|
@ -0,0 +1,97 @@
|
|||
use diesel::backend::Backend;
|
||||
use diesel::deserialize::FromSql;
|
||||
use diesel::serialize::{IsNull, Output, ToSql};
|
||||
use diesel::sql_types::Binary;
|
||||
use diesel::sqlite::Sqlite;
|
||||
use diesel::{deserialize, serialize};
|
||||
use serde::{Serialize, Serializer};
|
||||
use std::fmt::{Debug, Formatter};
|
||||
use std::io::Write;
|
||||
use uuid::Uuid;
|
||||
|
||||
/// Identifier stored as a BLOB in SQLite.
///
/// Two reserved variants (`On`, `Off`) map to the UUID integer values 1 and 0
/// respectively (see the `From<Uuid>` / `From<&EmgauwaUid>` conversions in
/// this module); every other identifier wraps a regular UUID.
#[derive(AsExpression, FromSqlRow, PartialEq, Clone)]
#[sql_type = "Binary"]
pub enum EmgauwaUid {
    On,
    Off,
    Any(Uuid),
}
|
||||
|
||||
impl Default for EmgauwaUid {
|
||||
fn default() -> Self {
|
||||
EmgauwaUid::Any(Uuid::new_v4())
|
||||
}
|
||||
}
|
||||
impl Debug for EmgauwaUid {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
EmgauwaUid::On => "on".fmt(f),
|
||||
EmgauwaUid::Off => "off".fmt(f),
|
||||
EmgauwaUid::Any(value) => value.fmt(f),
|
||||
}
|
||||
}
|
||||
}
|
||||
// Binary encoding for storage: a single byte 0 / 1 for Off / On, or the
// 16 raw UUID bytes for Any. The inverse lives in the FromSql impl below.
impl ToSql<Binary, Sqlite> for EmgauwaUid {
    fn to_sql<W: Write>(&self, out: &mut Output<W, Sqlite>) -> serialize::Result {
        match self {
            EmgauwaUid::On => out.write_all(&[1])?,
            EmgauwaUid::Off => out.write_all(&[0])?,
            // Uuid::from(self) returns the wrapped value for Any (see the
            // From<&EmgauwaUid> for Uuid impl in this module).
            EmgauwaUid::Any(_) => out.write_all(Uuid::from(self).as_bytes())?,
        }
        Ok(IsNull::No)
    }
}
|
||||
// Inverse of the ToSql impl above: blob [0] => Off, [1] => On, anything
// else is parsed as a 16-byte UUID. A NULL value falls back to a fresh
// random uid (EmgauwaUid::default()).
impl FromSql<Binary, Sqlite> for EmgauwaUid {
    fn from_sql(bytes: Option<&<Sqlite as Backend>::RawValue>) -> deserialize::Result<Self> {
        match bytes {
            None => Ok(EmgauwaUid::default()),
            Some(value) => match value.read_blob() {
                [0] => Ok(EmgauwaUid::Off),
                [1] => Ok(EmgauwaUid::On),
                // NOTE(review): unwrap panics on a malformed blob (wrong
                // length for a UUID) — consider returning a deserialize
                // error instead.
                value_bytes => Ok(EmgauwaUid::Any(Uuid::from_slice(value_bytes).unwrap())),
            },
        }
    }
}
|
||||
|
||||
impl Serialize for EmgauwaUid {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
match self {
|
||||
EmgauwaUid::On => "off".serialize(serializer),
|
||||
EmgauwaUid::Off => "on".serialize(serializer),
|
||||
EmgauwaUid::Any(value) => value.serialize(serializer),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl From<Uuid> for EmgauwaUid {
|
||||
fn from(uid: Uuid) -> EmgauwaUid {
|
||||
match uid.as_u128() {
|
||||
0 => EmgauwaUid::Off,
|
||||
1 => EmgauwaUid::On,
|
||||
_ => EmgauwaUid::Any(uid),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&EmgauwaUid> for Uuid {
|
||||
fn from(emgauwa_uid: &EmgauwaUid) -> Uuid {
|
||||
match emgauwa_uid {
|
||||
EmgauwaUid::On => uuid::Uuid::from_u128(1),
|
||||
EmgauwaUid::Off => uuid::Uuid::from_u128(0),
|
||||
EmgauwaUid::Any(value) => *value,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&EmgauwaUid> for String {
|
||||
fn from(emgauwa_uid: &EmgauwaUid) -> String {
|
||||
match emgauwa_uid {
|
||||
EmgauwaUid::On => String::from("off"),
|
||||
EmgauwaUid::Off => String::from("on"),
|
||||
EmgauwaUid::Any(value) => value.to_hyphenated().to_string(),
|
||||
}
|
||||
}
|
||||
}
|
1
src/handlers/mod.rs
Normal file
1
src/handlers/mod.rs
Normal file
|
@ -0,0 +1 @@
|
|||
pub mod v1;
|
1
src/handlers/v1/mod.rs
Normal file
1
src/handlers/v1/mod.rs
Normal file
|
@ -0,0 +1 @@
|
|||
pub mod schedules;
|
24
src/handlers/v1/schedules.rs
Normal file
24
src/handlers/v1/schedules.rs
Normal file
|
@ -0,0 +1,24 @@
|
|||
use crate::db;
|
||||
use actix_web::{HttpResponse, Responder};
|
||||
|
||||
pub async fn index() -> impl Responder {
|
||||
let schedules = db::get_schedules();
|
||||
HttpResponse::Ok().json(schedules)
|
||||
}
|
||||
|
||||
/// GET /api/v1/schedules/{id} — placeholder handler, not implemented yet.
pub async fn get() -> impl Responder {
    "hello from get schedules by id"
}
|
||||
|
||||
pub async fn add() -> impl Responder {
|
||||
let new_schedule = db::create_schedule("TEST");
|
||||
|
||||
match new_schedule {
|
||||
Ok(ok) => HttpResponse::Ok().json(ok),
|
||||
Err(err) => HttpResponse::InternalServerError().json(err),
|
||||
}
|
||||
}
|
||||
|
||||
/// DELETE /api/v1/schedules/{id} — placeholder handler, not implemented yet.
pub async fn delete() -> impl Responder {
    "hello from delete schedule"
}
|
38
src/main.rs
Normal file
38
src/main.rs
Normal file
|
@ -0,0 +1,38 @@
|
|||
mod db;
|
||||
mod handlers;
|
||||
|
||||
#[macro_use]
|
||||
extern crate diesel;
|
||||
#[macro_use]
|
||||
extern crate diesel_migrations;
|
||||
extern crate dotenv;
|
||||
|
||||
use actix_web::{web, App, HttpServer};
|
||||
|
||||
#[actix_web::main]
|
||||
async fn main() -> std::io::Result<()> {
|
||||
db::run_migrations();
|
||||
|
||||
HttpServer::new(|| {
|
||||
App::new()
|
||||
.route(
|
||||
"/api/v1/schedules",
|
||||
web::get().to(handlers::v1::schedules::index),
|
||||
)
|
||||
.route(
|
||||
"/api/v1/schedules",
|
||||
web::post().to(handlers::v1::schedules::add),
|
||||
)
|
||||
.route(
|
||||
"/api/v1/schedules/{id}",
|
||||
web::get().to(handlers::v1::schedules::get),
|
||||
)
|
||||
.route(
|
||||
"/api/v1/schedules/{id}",
|
||||
web::delete().to(handlers::v1::schedules::delete),
|
||||
)
|
||||
})
|
||||
.bind("127.0.0.1:5000")?
|
||||
.run()
|
||||
.await
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue