Start rust rewrite

This commit is contained in:
Tobias Reisinger 2021-11-04 23:37:16 +01:00
commit 12d57d020f
22 changed files with 693 additions and 0 deletions

15
.editorconfig Normal file
View file

@ -0,0 +1,15 @@
# EditorConfig is awesome:
# https://EditorConfig.org
# top-most EditorConfig file
root = true
# Unix-style newlines with a newline ending every file
[*]
end_of_line = lf
insert_final_newline = true
indent_style = space
indent_size = 4
[*.yml]
indent_size = 2

1
.env Normal file
View file

@ -0,0 +1 @@
DATABASE_URL=emgauwa-core.sqlite

11
.gitignore vendored Normal file
View file

@ -0,0 +1,11 @@
target/
tests/testing/
emgauwa-core.conf.d
emgauwa-core.sqlite
# Added by cargo
/target

BIN
Cargo.lock generated Normal file

Binary file not shown.

20
Cargo.toml Normal file
View file

@ -0,0 +1,20 @@
[package]
name = "emgauwa-core"
version = "0.1.0"
edition = "2021"
authors = ["Tobias Reisinger <tobias@msrg.cc>"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
#[profile.release]
#panic = 'abort'
[dependencies]
actix-web = "3"
diesel = { version = "1.4", features = ["sqlite", "uuid"] }
diesel_migrations = "1.4"
dotenv = "0.15"
serde = "1.0"
serde_json = "1.0"
serde_derive = "1.0"
uuid = { version = "0.8", features = ["serde", "v4"] }

1
README.md Normal file
View file

@ -0,0 +1 @@
[![Build Status](https://ci.serguzim.me/api/badges/emgauwa/core/status.svg)](https://ci.serguzim.me/emgauwa/core)

5
diesel.toml Normal file
View file

@ -0,0 +1,5 @@
# For documentation on how to configure this file,
# see diesel.rs/guides/configuring-diesel-cli
[print_schema]
file = "src/db/schema.rs"

17
emgauwa-core.conf Normal file
View file

@ -0,0 +1,17 @@
database = "emgauwa-core.sqlite"
content-dir = "/usr/share/webapps/emgauwa"
[not-found]
file = "404.html"
content = "404 - NOT FOUND"
content-type = "text/plain"
[bind]
http = "127.0.0.1:5000"
mqtt = "127.0.0.1:1883"
[logging]
level = "debug"
file = "stdout"
# vim: set ft=toml:

View file

@ -0,0 +1,8 @@
-- Revert the base schema.
-- Drop order matters: child tables (with REFERENCES clauses) go first so
-- no foreign-key references are left dangling mid-script.
DROP TABLE macro_actions;
DROP TABLE macros;
DROP TABLE junction_relay_schedule;
DROP TABLE junction_tag;
DROP TABLE tags;
DROP TABLE schedules;
DROP TABLE relays;
DROP TABLE controllers;

View file

@ -0,0 +1,128 @@
-- Registered controller devices and how to reach them.
CREATE TABLE controllers (
    id          INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    uid         VARCHAR(36) NOT NULL UNIQUE,
    name        VARCHAR(128),
    ip          VARCHAR(16),
    port        INTEGER,
    relay_count INTEGER,
    active      BOOLEAN NOT NULL
);
-- One switchable output on a controller; removed together with its
-- controller via ON DELETE CASCADE.
CREATE TABLE relays (
    id            INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    name          VARCHAR(128),
    number        INTEGER NOT NULL,
    controller_id INTEGER NOT NULL REFERENCES controllers (id) ON DELETE CASCADE
);
-- Switching schedules. uid is a BLOB: the two built-in rows below use
-- single-byte markers; other rows presumably hold 16 raw uuid bytes
-- (see the EmgauwaUid type in src/db/types.rs).
CREATE TABLE schedules
(
id INTEGER
PRIMARY KEY
AUTOINCREMENT
NOT NULL,
uid BLOB
NOT NULL
UNIQUE,
name VARCHAR(128)
NOT NULL,
periods TEXT
NOT NULL
);
--INSERT INTO schedules (uid, name, periods) VALUES (x'6f666600000000000000000000000000', 'off', x'00');
--INSERT INTO schedules (uid, name, periods) VALUES (x'6f6e0000000000000000000000000000', 'on', x'010000009F05');
-- Built-in "off" (x'00') and "on" (x'01') schedules; the single marker
-- byte matches what EmgauwaUid's ToSql writes for its Off/On variants.
INSERT INTO schedules (uid, name, periods) VALUES (x'00', 'off', '00');
INSERT INTO schedules (uid, name, periods) VALUES (x'01', 'on', '010000009F05');
-- Free-form labels that can be attached to relays and schedules.
CREATE TABLE tags (
    id  INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    tag VARCHAR(128) NOT NULL UNIQUE
);
-- Links a tag to either a relay or a schedule (both target columns are
-- nullable; rows vanish when any referenced row is deleted).
CREATE TABLE junction_tag
(
id INTEGER
PRIMARY KEY
AUTOINCREMENT
NOT NULL,
tag_id INTEGER
NOT NULL
REFERENCES tags (id)
ON DELETE CASCADE,
relay_id INTEGER
REFERENCES relays (id)
ON DELETE CASCADE,
schedule_id INTEGER
REFERENCES schedules (id)
ON DELETE CASCADE
);
-- Assigns a schedule to a relay for one weekday. schedule_id defaults
-- to 1 and falls back to that default (ON DELETE SET DEFAULT) when the
-- assigned schedule is deleted — presumably row 1 is the seeded 'off'
-- schedule; confirm against the insert order in this migration.
CREATE TABLE junction_relay_schedule
(
id INTEGER
PRIMARY KEY
AUTOINCREMENT
NOT NULL,
weekday SMALLINT
NOT NULL,
relay_id INTEGER
REFERENCES relays (id)
ON DELETE CASCADE,
schedule_id INTEGER
DEFAULT 1
REFERENCES schedules (id)
ON DELETE SET DEFAULT
);
-- Named collections of relay/schedule actions.
CREATE TABLE macros (
    id   INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    uid  VARCHAR(36) NOT NULL UNIQUE,
    name VARCHAR(128)
);
-- One step of a macro: on a given weekday, apply a schedule to a relay.
-- Steps disappear with their macro, relay, or schedule (CASCADE).
CREATE TABLE macro_actions
(
id INTEGER
PRIMARY KEY
AUTOINCREMENT
NOT NULL,
macro_id INTEGER
NOT NULL
REFERENCES macros (id)
ON DELETE CASCADE,
relay_id INTEGER
REFERENCES relays (id)
ON DELETE CASCADE,
schedule_id INTEGER
REFERENCES schedules (id)
ON DELETE CASCADE,
weekday SMALLINT
NOT NULL
);

10
sql/cache.sql Normal file
View file

@ -0,0 +1,10 @@
-- a key-value table used for the json-cache
CREATE TABLE cache (
    -- BUG FIX: was `key STRING` — SQLite does not know the type name
    -- STRING and assigns it NUMERIC affinity, which coerces keys such
    -- as '123' to numbers and breaks text lookups. TEXT keeps keys as
    -- text.
    key TEXT
        PRIMARY KEY,
    value TEXT
        NOT NULL,
    -- presumably a unix timestamp, 0 = no expiry — TODO confirm against
    -- the cache code (not part of this commit)
    expiration INT
        DEFAULT 0
);

83
sql/migration_0.sql Normal file
View file

@ -0,0 +1,83 @@
-- base migration
-- NOTE(review): this parallels migrations/.../up.sql but differs in
-- details (uid BLOB on controllers, nullable schedule name, BLOB
-- periods, junction tables without id columns) — presumably an older
-- hand-rolled migration path; confirm which one is authoritative.
CREATE TABLE controllers
(
id INTEGER
PRIMARY KEY
AUTOINCREMENT,
uid BLOB
NOT NULL
UNIQUE,
name VARCHAR(128),
ip VARCHAR(16),
port INTEGER,
relay_count INTEGER,
active BOOLEAN
NOT NULL
);
CREATE TABLE relays
(
id INTEGER
PRIMARY KEY
AUTOINCREMENT,
name VARCHAR(128),
number INTEGER
NOT NULL,
controller_id INTEGER
NOT NULL
REFERENCES controllers (id)
ON DELETE CASCADE
);
CREATE TABLE schedules
(
id INTEGER
PRIMARY KEY
AUTOINCREMENT,
uid BLOB
NOT NULL
UNIQUE,
name VARCHAR(128),
periods BLOB
);
CREATE TABLE tags
(
id INTEGER
PRIMARY KEY
AUTOINCREMENT,
tag VARCHAR(128)
NOT NULL
UNIQUE
);
CREATE TABLE junction_tag
(
tag_id INTEGER
NOT NULL
REFERENCES tags (id)
ON DELETE CASCADE,
relay_id INTEGER
REFERENCES relays (id)
ON DELETE CASCADE,
schedule_id INTEGER
REFERENCES schedules (id)
ON DELETE CASCADE
);
CREATE TABLE junction_relay_schedule
(
weekday SMALLINT
NOT NULL,
relay_id INTEGER
REFERENCES relays (id)
ON DELETE CASCADE,
schedule_id INTEGER
DEFAULT 1
REFERENCES schedules (id)
ON DELETE SET DEFAULT
);
-- Seed the built-in schedules; here the uid blobs are ascii-prefixed
-- uuids ('off'/'on' padded to 16 bytes), unlike the single-byte markers
-- used by the diesel up.sql migration.
INSERT INTO schedules (uid, name, periods) VALUES (x'6f666600000000000000000000000000', 'off', x'00');
INSERT INTO schedules (uid, name, periods) VALUES (x'6f6e0000000000000000000000000000', 'on', x'010000009F05');

28
sql/migration_1.sql Normal file
View file

@ -0,0 +1,28 @@
-- migration to add macros
-- Adds named action collections on top of migration_0's base schema.
CREATE TABLE macros
(
id INTEGER
PRIMARY KEY
AUTOINCREMENT,
uid BLOB
NOT NULL
UNIQUE,
name VARCHAR(128)
);
-- One step of a macro: apply a schedule to a relay on a weekday; rows
-- cascade away with their macro, relay, or schedule.
CREATE TABLE macro_actions
(
macro_id INTEGER
NOT NULL
REFERENCES macros (id)
ON DELETE CASCADE,
relay_id INTEGER
REFERENCES relays (id)
ON DELETE CASCADE,
schedule_id INTEGER
REFERENCES schedules (id)
ON DELETE CASCADE,
weekday SMALLINT
NOT NULL
);

62
src/db.rs Normal file
View file

@ -0,0 +1,62 @@
pub mod errors;
pub mod models;
pub mod schema;
mod types;
use diesel::prelude::*;
use diesel::dsl::sql;
use dotenv::dotenv;
use std::env;
use models::*;
use schema::schedules::dsl::*;
use diesel_migrations::embed_migrations;
use errors::DatabaseError;
use types::EmgauwaUid;
embed_migrations!("migrations");
/// Opens a new SQLite connection to the database named by the
/// `DATABASE_URL` environment variable (loaded from `.env` via dotenv
/// when present).
///
/// Panics when the variable is missing or the connection fails.
/// NOTE(review): every call opens a fresh connection — each request
/// handler pays connection setup; a pool would avoid this.
fn get_connection() -> SqliteConnection {
dotenv().ok();
let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
SqliteConnection::establish(&database_url)
.unwrap_or_else(|_| panic!("Error connecting to {}", database_url))
}
/// Applies all migrations embedded from the `migrations/` directory
/// (see `embed_migrations!` above). Called once at startup from `main`;
/// panics on failure so the server never runs against a stale schema.
pub fn run_migrations() {
let connection = get_connection();
embedded_migrations::run(&connection).expect("Failed to run migrations.");
}
/// Loads all schedules from the database.
///
/// Panics when the query fails, matching the error style of the other
/// free functions in this module.
pub fn get_schedules() -> Vec<Schedule> {
    let connection = get_connection();
    schedules
        // BUG FIX: a leftover `.limit(5)` silently capped the schedule
        // index endpoint at five rows; return every schedule instead.
        .load::<Schedule>(&connection)
        .expect("Error loading schedules")
}
/// Inserts a new schedule with a random uid and empty periods, then
/// reads the freshly created row back via SQLite's
/// `last_insert_rowid()`.
pub fn create_schedule(new_name: &str) -> Result<Schedule, DatabaseError> {
    let connection = get_connection();

    let candidate = NewSchedule {
        uid: &EmgauwaUid::default(),
        name: new_name,
        periods: "",
    };

    if diesel::insert_into(schedules)
        .values(&candidate)
        .execute(&connection)
        .is_err()
    {
        return Err(DatabaseError::InsertError);
    }

    schedules
        .find(sql("last_insert_rowid()"))
        .get_result::<Schedule>(&connection)
        .map_err(|_| DatabaseError::InsertGetError)
}

30
src/db/errors.rs Normal file
View file

@ -0,0 +1,30 @@
use serde::ser::SerializeStruct;
use serde::{Serialize, Serializer};
/// Errors reported by the database layer in `src/db.rs`.
pub enum DatabaseError {
// the INSERT statement itself failed
InsertError,
// the INSERT succeeded but reading the new row back failed
InsertGetError,
}
/// Serializes the error as `{"code": 500, "description": "..."}` so
/// handlers can hand it straight to `HttpResponse::json`.
impl Serialize for DatabaseError {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
// two fields: a fixed HTTP-style code and the human-readable text
// produced by the From<&DatabaseError> for String impl below
let mut s = serializer.serialize_struct("error", 2)?;
s.serialize_field("code", &500)?;
s.serialize_field("description", &String::from(self))?;
s.end()
}
}
impl From<&DatabaseError> for String {
fn from(err: &DatabaseError) -> Self {
match err {
DatabaseError::InsertError => String::from("error inserting into database"),
DatabaseError::InsertGetError => {
String::from("error retrieving new entry from database (your entry was saved)")
}
}
}
}

20
src/db/models.rs Normal file
View file

@ -0,0 +1,20 @@
use super::types::EmgauwaUid;
use serde::Serialize;
use super::schema::schedules;
// Row type for the `schedules` table (column order must match
// src/db/schema.rs for the Queryable derive).
#[derive(Serialize, Queryable)]
pub struct Schedule {
pub id: i32,
pub uid: EmgauwaUid,
pub name: String,
pub periods: String,
}
// Insertable form of `Schedule`: `id` is omitted so SQLite assigns it.
#[derive(Insertable)]
#[table_name = "schedules"]
pub struct NewSchedule<'a> {
pub uid: &'a EmgauwaUid,
pub name: &'a str,
pub periods: &'a str,
}

93
src/db/schema.rs Normal file
View file

@ -0,0 +1,93 @@
// Diesel schema definitions, kept in sync with the SQL migrations
// (diesel.toml's [print_schema] writes this file). Hand edits here must
// mirror a migration.
table! {
controllers (id) {
id -> Integer,
uid -> Text,
name -> Nullable<Text>,
ip -> Nullable<Text>,
port -> Nullable<Integer>,
relay_count -> Nullable<Integer>,
active -> Bool,
}
}
table! {
junction_relay_schedule (id) {
id -> Integer,
weekday -> SmallInt,
relay_id -> Nullable<Integer>,
schedule_id -> Nullable<Integer>,
}
}
table! {
junction_tag (id) {
id -> Integer,
tag_id -> Integer,
relay_id -> Nullable<Integer>,
schedule_id -> Nullable<Integer>,
}
}
table! {
macro_actions (id) {
id -> Integer,
macro_id -> Integer,
relay_id -> Nullable<Integer>,
schedule_id -> Nullable<Integer>,
weekday -> SmallInt,
}
}
table! {
macros (id) {
id -> Integer,
uid -> Text,
name -> Nullable<Text>,
}
}
table! {
relays (id) {
id -> Integer,
name -> Nullable<Text>,
number -> Integer,
controller_id -> Integer,
}
}
table! {
// uid is Binary: mapped to the custom EmgauwaUid type in db/types.rs
schedules (id) {
id -> Integer,
uid -> Binary,
name -> Text,
periods -> Text,
}
}
table! {
tags (id) {
id -> Integer,
tag -> Text,
}
}
// Declare the foreign-key relationships so diesel can generate joins.
joinable!(junction_relay_schedule -> relays (relay_id));
joinable!(junction_relay_schedule -> schedules (schedule_id));
joinable!(junction_tag -> relays (relay_id));
joinable!(junction_tag -> schedules (schedule_id));
joinable!(junction_tag -> tags (tag_id));
joinable!(macro_actions -> macros (macro_id));
joinable!(macro_actions -> relays (relay_id));
joinable!(macro_actions -> schedules (schedule_id));
joinable!(relays -> controllers (controller_id));
allow_tables_to_appear_in_same_query!(
controllers,
junction_relay_schedule,
junction_tag,
macro_actions,
macros,
relays,
schedules,
tags,
);

97
src/db/types.rs Normal file
View file

@ -0,0 +1,97 @@
use diesel::backend::Backend;
use diesel::deserialize::FromSql;
use diesel::serialize::{IsNull, Output, ToSql};
use diesel::sql_types::Binary;
use diesel::sqlite::Sqlite;
use diesel::{deserialize, serialize};
use serde::{Serialize, Serializer};
use std::fmt::{Debug, Formatter};
use std::io::Write;
use uuid::Uuid;
/// Schedule uid stored in the `schedules.uid` BLOB column. The two
/// well-known schedules get fixed single-byte encodings; everything
/// else carries a full uuid (see the ToSql/FromSql impls below).
#[derive(AsExpression, FromSqlRow, PartialEq, Clone)]
#[sql_type = "Binary"]
pub enum EmgauwaUid {
// stored as the single byte 0x01
On,
// stored as the single byte 0x00
Off,
// stored as the 16 raw uuid bytes
Any(Uuid),
}
impl Default for EmgauwaUid {
// a fresh uid is a random (v4) uuid
fn default() -> Self {
EmgauwaUid::Any(Uuid::new_v4())
}
}
/// Debug-formats the uid: the literal "on"/"off" for the fixed
/// variants, the uuid's own Debug form otherwise.
impl Debug for EmgauwaUid {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        if let EmgauwaUid::Any(value) = self {
            return value.fmt(f);
        }
        let label = if *self == EmgauwaUid::On { "on" } else { "off" };
        label.fmt(f)
    }
}
impl ToSql<Binary, Sqlite> for EmgauwaUid {
// On/Off are written as single marker bytes 1/0 (matching the seed
// rows in the up migration); any other uid is written as its 16 raw
// uuid bytes.
fn to_sql<W: Write>(&self, out: &mut Output<W, Sqlite>) -> serialize::Result {
match self {
EmgauwaUid::On => out.write_all(&[1])?,
EmgauwaUid::Off => out.write_all(&[0])?,
EmgauwaUid::Any(_) => out.write_all(Uuid::from(self).as_bytes())?,
}
Ok(IsNull::No)
}
}
impl FromSql<Binary, Sqlite> for EmgauwaUid {
    /// Decodes the BLOB written by `to_sql`: a single 0x00/0x01 byte
    /// maps back to Off/On; anything else must be 16 uuid bytes.
    ///
    /// NOTE(review): a NULL value yields a fresh random uid — presumably
    /// unreachable since the column is NOT NULL; confirm.
    fn from_sql(bytes: Option<&<Sqlite as Backend>::RawValue>) -> deserialize::Result<Self> {
        match bytes {
            None => Ok(EmgauwaUid::default()),
            Some(value) => match value.read_blob() {
                [0] => Ok(EmgauwaUid::Off),
                [1] => Ok(EmgauwaUid::On),
                // BUG FIX: was `Uuid::from_slice(...).unwrap()`, which
                // panics on a malformed blob; propagate it as a
                // deserialize error instead.
                value_bytes => Ok(EmgauwaUid::Any(Uuid::from_slice(value_bytes)?)),
            },
        }
    }
}
impl Serialize for EmgauwaUid {
    /// Serializes the uid as a human-readable string: "on"/"off" for
    /// the fixed variants, the uuid's serde form otherwise.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match self {
            // BUG FIX: the labels were swapped (On serialized as "off"
            // and Off as "on"); align with Debug and the DB encoding.
            EmgauwaUid::On => "on".serialize(serializer),
            EmgauwaUid::Off => "off".serialize(serializer),
            EmgauwaUid::Any(value) => value.serialize(serializer),
        }
    }
}
impl From<Uuid> for EmgauwaUid {
// the reserved u128 values 0 and 1 map to the fixed Off/On variants,
// mirroring the inverse conversion below
fn from(uid: Uuid) -> EmgauwaUid {
match uid.as_u128() {
0 => EmgauwaUid::Off,
1 => EmgauwaUid::On,
_ => EmgauwaUid::Any(uid),
}
}
}
impl From<&EmgauwaUid> for Uuid {
fn from(emgauwa_uid: &EmgauwaUid) -> Uuid {
match emgauwa_uid {
EmgauwaUid::On => uuid::Uuid::from_u128(1),
EmgauwaUid::Off => uuid::Uuid::from_u128(0),
EmgauwaUid::Any(value) => *value,
}
}
}
impl From<&EmgauwaUid> for String {
    /// Human-readable form: "on"/"off" for the fixed variants, the
    /// hyphenated uuid otherwise.
    fn from(emgauwa_uid: &EmgauwaUid) -> String {
        match emgauwa_uid {
            // BUG FIX: the labels were swapped (On produced "off" and
            // Off produced "on"); align with Debug and the DB encoding.
            EmgauwaUid::On => String::from("on"),
            EmgauwaUid::Off => String::from("off"),
            EmgauwaUid::Any(value) => value.to_hyphenated().to_string(),
        }
    }
}

1
src/handlers/mod.rs Normal file
View file

@ -0,0 +1 @@
pub mod v1;

1
src/handlers/v1/mod.rs Normal file
View file

@ -0,0 +1 @@
pub mod schedules;

View file

@ -0,0 +1,24 @@
use crate::db;
use actix_web::{HttpResponse, Responder};
// GET /api/v1/schedules — return all schedules as JSON.
// NOTE(review): db::get_schedules does blocking diesel I/O inside an
// async handler, which stalls the actix worker thread — consider
// web::block.
pub async fn index() -> impl Responder {
let schedules = db::get_schedules();
HttpResponse::Ok().json(schedules)
}
// GET /api/v1/schedules/{id} — placeholder; returns a stub string, the
// {id} path parameter is not read yet.
pub async fn get() -> impl Responder {
"hello from get schedules by id"
}
// POST /api/v1/schedules — create a schedule and return it as JSON, or
// a 500 with a JSON error body on database failure.
// NOTE(review): the name is hard-coded to "TEST"; the request body is
// not parsed yet.
pub async fn add() -> impl Responder {
let new_schedule = db::create_schedule("TEST");
match new_schedule {
Ok(ok) => HttpResponse::Ok().json(ok),
Err(err) => HttpResponse::InternalServerError().json(err),
}
}
// DELETE /api/v1/schedules/{id} — placeholder; returns a stub string,
// nothing is deleted yet.
pub async fn delete() -> impl Responder {
"hello from delete schedule"
}

38
src/main.rs Normal file
View file

@ -0,0 +1,38 @@
mod db;
mod handlers;
#[macro_use]
extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
extern crate dotenv;
use actix_web::{web, App, HttpServer};
// Entry point: apply pending database migrations, then serve the
// /api/v1/schedules REST endpoints.
#[actix_web::main]
async fn main() -> std::io::Result<()> {
db::run_migrations();
HttpServer::new(|| {
App::new()
.route(
"/api/v1/schedules",
web::get().to(handlers::v1::schedules::index),
)
.route(
"/api/v1/schedules",
web::post().to(handlers::v1::schedules::add),
)
.route(
"/api/v1/schedules/{id}",
web::get().to(handlers::v1::schedules::get),
)
.route(
"/api/v1/schedules/{id}",
web::delete().to(handlers::v1::schedules::delete),
)
})
// NOTE(review): address duplicates the [bind] http value in
// emgauwa-core.conf — the config file is presumably not read yet;
// confirm.
.bind("127.0.0.1:5000")?
.run()
.await
}