commit bd44dc3183
parent 4261141c3a

Add much stuff for rewrite

37 changed files with 374 additions and 1490 deletions
.drone.yml (deleted, 38 lines)
@@ -1,38 +0,0 @@
-kind: pipeline
-name: default
-
-workspace:
-  path: /drone/src
-
-steps:
-  - name: build
-    image: registry.serguzim.me/emgauwa/builder:rust
-    volumes:
-      - name: docker-socket
-        path: /var/run/docker.sock
-    pull: always
-    commands:
-      - cross build --release --target arm-unknown-linux-musleabihf
-      - ls -lh ./target/arm-unknown-linux-musleabihf/release/emgauwa-core
-
-  - name: gitea_release
-    image: plugins/gitea-release
-    settings:
-      api_key:
-        from_secret: gitea_token
-      base_url: https://git.serguzim.me
-      title: ${DRONE_TAG}
-    when:
-      event: tag
-
-trigger:
-  ref:
-    include:
-      - refs/heads/main
-      - refs/heads/testing
-      - refs/tags/**
-
-volumes:
-  - name: docker-socket
-    host:
-      path: /var/run/docker.sock
Cargo.lock (generated): binary file not shown.
Cargo.toml (22 changes)
@@ -10,15 +10,23 @@ authors = ["Tobias Reisinger <tobias@msrg.cc>"]
 #panic = 'abort'
 
 [dependencies]
-actix-web = "3"
-chrono = { version = "0.4", features = ["serde"] }
-diesel = { version = "1.4", features = ["sqlite", "uuid"] }
-diesel_migrations = "1.4"
+actix-web = "4.4"
+diesel = { version = "2.1", features = ["uuid", "sqlite"] }
+diesel_migrations = "2.1"
 
 dotenv = "0.15"
-env_logger = "0.9.0"
+config = "0.13"
+lazy_static = { version = "1.4.0", features = [] }
+
+simple_logger = "4.2"
+log = "0.4"
+
+chrono = { version = "0.4", features = ["serde"] }
+uuid = { version = "1.5", features = ["serde", "v4"] }
 
 serde = "1.0"
 serde_json = "1.0"
 serde_derive = "1.0"
 
 libsqlite3-sys = { version = "*", features = ["bundled"] }
-uuid = { version = "0.8", features = ["serde", "v4"] }
-wiringpi = { git = "https://github.com/jvandervelden/rust-wiringpi.git " }
@@ -1 +1 @@
-[![Build Status](https://ci.serguzim.me/api/badges/emgauwa/core/status.svg)](https://ci.serguzim.me/emgauwa/core)
+[![Build Status](https://ci.serguzim.me/api/badges/emgauwa/core/status.svg)](https://ci.serguzim.me/emgauwa/core)
build.rs (3 changes)
@@ -1,4 +1,3 @@
 fn main() {
-    #[cfg(any(target_arch = "arm", target_arch = "aarch64"))]
-    println!("cargo:rustc-link-lib=dylib=wiringPi");
+    println!("cargo:rerun-if-changed=migrations");
 }
@@ -1,17 +0,0 @@
-database = "emgauwa-core.sqlite"
-content-dir = "/usr/share/webapps/emgauwa"
-
-[not-found]
-file = "404.html"
-content = "404 - NOT FOUND"
-content-type = "text/plain"
-
-[bind]
-http = "127.0.0.1:5000"
-mqtt = "127.0.0.1:1883"
-
-[logging]
-level = "debug"
-file = "stdout"
-
-# vim: set ft=toml:
emgauwa-core.toml (new file, 8 lines)
@@ -0,0 +1,8 @@
+port = 5000
+host = "127.0.0.1"
+
+database = "sqlite://emgauwa-core.sqlite"
+
+[logging]
+level = "DEBUG"
+file = "stdout"
@@ -61,8 +61,6 @@ CREATE TABLE schedules
     BLOB
     NOT NULL
 );
-INSERT INTO schedules (uid, name, periods) VALUES (x'00', 'off', x'');
-INSERT INTO schedules (uid, name, periods) VALUES (x'01', 'on', x'00000000');
 
 CREATE TABLE tags
 (
@@ -1,10 +0,0 @@
--- a key-value table used for the json-cache
-
-CREATE TABLE cache (
-    key STRING
-        PRIMARY KEY,
-    value TEXT
-        NOT NULL,
-    expiration INT
-        DEFAULT 0
-);
@@ -1,83 +0,0 @@
--- base migration
-
-CREATE TABLE controllers
-(
-    id INTEGER
-        PRIMARY KEY
-        AUTOINCREMENT,
-    uid BLOB
-        NOT NULL
-        UNIQUE,
-    name VARCHAR(128),
-    ip VARCHAR(16),
-    port INTEGER,
-    relay_count INTEGER,
-    active BOOLEAN
-        NOT NULL
-);
-
-CREATE TABLE relays
-(
-    id INTEGER
-        PRIMARY KEY
-        AUTOINCREMENT,
-    name VARCHAR(128),
-    number INTEGER
-        NOT NULL,
-    controller_id INTEGER
-        NOT NULL
-        REFERENCES controllers (id)
-        ON DELETE CASCADE
-);
-
-CREATE TABLE schedules
-(
-    id INTEGER
-        PRIMARY KEY
-        AUTOINCREMENT,
-    uid BLOB
-        NOT NULL
-        UNIQUE,
-    name VARCHAR(128),
-    periods BLOB
-);
-
-CREATE TABLE tags
-(
-    id INTEGER
-        PRIMARY KEY
-        AUTOINCREMENT,
-    tag VARCHAR(128)
-        NOT NULL
-        UNIQUE
-);
-
-CREATE TABLE junction_tag
-(
-    tag_id INTEGER
-        NOT NULL
-        REFERENCES tags (id)
-        ON DELETE CASCADE,
-    relay_id INTEGER
-        REFERENCES relays (id)
-        ON DELETE CASCADE,
-    schedule_id INTEGER
-        REFERENCES schedules (id)
-        ON DELETE CASCADE
-);
-
-CREATE TABLE junction_relay_schedule
-(
-    weekday SMALLINT
-        NOT NULL,
-    relay_id INTEGER
-        REFERENCES relays (id)
-        ON DELETE CASCADE,
-    schedule_id INTEGER
-        DEFAULT 1
-        REFERENCES schedules (id)
-        ON DELETE SET DEFAULT
-);
-
-INSERT INTO schedules (uid, name, periods) VALUES (x'6f666600000000000000000000000000', 'off', x'00');
-INSERT INTO schedules (uid, name, periods) VALUES (x'6f6e0000000000000000000000000000', 'on', x'010000009F05');
@@ -1,28 +0,0 @@
--- migration to add macros
-
-CREATE TABLE macros
-(
-    id INTEGER
-        PRIMARY KEY
-        AUTOINCREMENT,
-    uid BLOB
-        NOT NULL
-        UNIQUE,
-    name VARCHAR(128)
-);
-
-CREATE TABLE macro_actions
-(
-    macro_id INTEGER
-        NOT NULL
-        REFERENCES macros (id)
-        ON DELETE CASCADE,
-    relay_id INTEGER
-        REFERENCES relays (id)
-        ON DELETE CASCADE,
-    schedule_id INTEGER
-        REFERENCES schedules (id)
-        ON DELETE CASCADE,
-    weekday SMALLINT
-        NOT NULL
-);
src/db.rs (53 changes)
@@ -1,8 +1,13 @@
 use std::env;
 
+use crate::db::errors::DatabaseError;
+use crate::db::model_utils::Period;
+use crate::db::models::{NewSchedule, Periods};
+use crate::types::EmgauwaUid;
 use diesel::prelude::*;
-use diesel_migrations::embed_migrations;
+use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};
 use dotenv::dotenv;
+use log::{info, trace};
 
 pub mod errors;
 pub mod models;
@@ -12,7 +17,7 @@ pub mod tag;
 
 mod model_utils;
 
-embed_migrations!("migrations");
+pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations");
 
 fn get_connection() -> SqliteConnection {
     dotenv().ok();
@@ -23,6 +28,46 @@ fn get_connection() -> SqliteConnection {
 }
 
 pub fn run_migrations() {
-    let connection = get_connection();
-    embedded_migrations::run(&connection).expect("Failed to run migrations.");
+    info!("Running migrations");
+    let mut connection = get_connection();
+    connection
+        .run_pending_migrations(MIGRATIONS)
+        .expect("Failed to run migrations.");
+}
+
+fn init_schedule(schedule: &NewSchedule) -> Result<(), DatabaseError> {
+    trace!("Initializing schedule {:?}", schedule.name);
+    match schedules::get_schedule_by_uid(schedule.uid.clone()) {
+        Ok(_) => Ok(()),
+        Err(err) => match err {
+            DatabaseError::NotFound => {
+                trace!("Schedule {:?} not found, inserting", schedule.name);
+                let mut connection = get_connection();
+                diesel::insert_into(schema::schedules::table)
+                    .values(schedule)
+                    .execute(&mut connection)
+                    .map(|_| ())
+                    .map_err(DatabaseError::InsertError)
+            }
+            _ => Err(err),
+        },
+    }
+}
+
+pub fn init(db: &str) {
+    run_migrations();
+
+    init_schedule(&NewSchedule {
+        uid: &EmgauwaUid::Off,
+        name: "Off",
+        periods: &Periods(vec![]),
+    })
+    .expect("Error initializing schedule Off");
+
+    init_schedule(&NewSchedule {
+        uid: &EmgauwaUid::On,
+        name: "On",
+        periods: &Periods(vec![Period::new_on()]),
+    })
+    .expect("Error initializing schedule On");
 }
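The core of this hunk is the switch from Diesel 1's generated embedded_migrations module to Diesel 2's MigrationHarness trait. A minimal standalone sketch of the new pattern, assuming a migrations/ directory next to Cargo.toml and the sqlite feature (names here are illustrative, not the project's):

    use diesel::prelude::*;
    use diesel::sqlite::SqliteConnection;
    use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};

    // Diesel 2: embed_migrations! yields a const value instead of generating a module.
    pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations");

    fn migrate(database_url: &str) {
        // The harness runs on a mutable connection; Diesel 1 took `&conn`.
        let mut connection =
            SqliteConnection::establish(database_url).expect("failed to open database");
        connection
            .run_pending_migrations(MIGRATIONS)
            .expect("failed to run migrations");
    }

Note also that the seed rows for the "off" and "on" schedules, previously inserted by the SQL migration above, now live in `db::init`, which upserts them through `init_schedule` at startup.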
@@ -11,6 +11,7 @@ pub enum DatabaseError {
     NotFound,
     Protected,
     UpdateError(diesel::result::Error),
+    Unknown,
 }
 
 impl DatabaseError {
@@ -47,6 +48,7 @@ impl From<&DatabaseError> for String {
             DatabaseError::DeleteError => String::from("error on deleting from database"),
             DatabaseError::Protected => String::from("model is protected"),
             DatabaseError::UpdateError(_) => String::from("error on updating the model"),
+            DatabaseError::Unknown => String::from("unknown error"),
         }
     }
 }
@@ -1,16 +1,14 @@
 use crate::db::models::Periods;
 use chrono::{NaiveTime, Timelike};
-use diesel::backend::Backend;
 use diesel::deserialize::FromSql;
 use diesel::serialize::{IsNull, Output, ToSql};
 use diesel::sql_types::Binary;
 use diesel::sqlite::Sqlite;
 use diesel::{deserialize, serialize};
 use serde::{Deserialize, Serialize};
-use std::io::Write;
 
 #[derive(Debug, Serialize, Deserialize, AsExpression, FromSqlRow, PartialEq, Clone)]
-#[sql_type = "Binary"]
+#[diesel(sql_type = Binary)]
 pub struct Period {
     #[serde(with = "period_format")]
     pub start: NaiveTime,
@@ -41,23 +39,51 @@ mod period_format {
     }
 }
 
-impl ToSql<Binary, Sqlite> for Periods {
-    fn to_sql<W: Write>(&self, out: &mut Output<W, Sqlite>) -> serialize::Result {
-        for period in self.0.iter() {
-            out.write_all(&[
-                period.start.hour() as u8,
-                period.start.minute() as u8,
-                period.end.hour() as u8,
-                period.end.minute() as u8,
-            ])?;
-        }
+impl Period {
+    pub fn new(start: NaiveTime, end: NaiveTime) -> Self {
+        Period { start, end }
+    }
+
+    pub fn new_on() -> Self {
+        Period {
+            start: NaiveTime::from_hms_opt(0, 0, 0).unwrap(),
+            end: NaiveTime::from_hms_opt(0, 0, 0).unwrap(),
+        }
+    }
+}
+
+impl ToSql<Binary, Sqlite> for Periods
+where
+    Vec<u8>: ToSql<Binary, Sqlite>,
+{
+    fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Sqlite>) -> serialize::Result {
+        let periods_u8: Vec<u8> = self
+            .0
+            .iter()
+            .flat_map(|period| {
+                let vec = vec![
+                    period.start.hour() as u8,
+                    period.start.minute() as u8,
+                    period.end.hour() as u8,
+                    period.end.minute() as u8,
+                ];
+                vec
+            })
+            .collect();
+
+        out.set_value(periods_u8);
+
         Ok(IsNull::No)
     }
 }
 
-impl FromSql<Binary, Sqlite> for Periods {
-    fn from_sql(bytes: Option<&<Sqlite as Backend>::RawValue>) -> deserialize::Result<Self> {
-        let blob = bytes.unwrap().read_blob();
+impl<DB> FromSql<Binary, DB> for Periods
+where
+    DB: diesel::backend::Backend,
+    Vec<u8>: FromSql<Binary, DB>,
+{
+    fn from_sql(bytes: DB::RawValue<'_>) -> deserialize::Result<Self> {
+        let blob: Vec<u8> = Vec::from_sql(bytes).unwrap();
 
         let mut vec = Vec::new();
         for i in (3..blob.len()).step_by(4) {
@@ -66,8 +92,8 @@ impl FromSql<Binary, Sqlite> for Periods {
             let end_val_h: u32 = blob[i - 1] as u32;
             let end_val_m: u32 = blob[i] as u32;
             vec.push(Period {
-                start: NaiveTime::from_hms(start_val_h, start_val_m, 0),
-                end: NaiveTime::from_hms(end_val_h, end_val_m, 0),
+                start: NaiveTime::from_hms_opt(start_val_h, start_val_m, 0).unwrap(),
+                end: NaiveTime::from_hms_opt(end_val_h, end_val_m, 0).unwrap(),
             });
         }
         Ok(Periods(vec))
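For reference, the blob layout the two impls above agree on packs each period into four bytes: start hour, start minute, end hour, end minute. A self-contained sketch of that round trip using plain chrono, no Diesel (function names are illustrative):

    use chrono::{NaiveTime, Timelike};

    // Pack periods as [start_h, start_m, end_h, end_m] per entry,
    // mirroring the ToSql impl above.
    fn encode(periods: &[(NaiveTime, NaiveTime)]) -> Vec<u8> {
        periods
            .iter()
            .flat_map(|(start, end)| {
                [
                    start.hour() as u8,
                    start.minute() as u8,
                    end.hour() as u8,
                    end.minute() as u8,
                ]
            })
            .collect()
    }

    // Walk the blob in 4-byte steps, mirroring the FromSql impl above.
    fn decode(blob: &[u8]) -> Vec<(NaiveTime, NaiveTime)> {
        blob.chunks_exact(4)
            .map(|b| {
                (
                    NaiveTime::from_hms_opt(b[0] as u32, b[1] as u32, 0).unwrap(),
                    NaiveTime::from_hms_opt(b[2] as u32, b[3] as u32, 0).unwrap(),
                )
            })
            .collect()
    }

    fn main() {
        let p = vec![(
            NaiveTime::from_hms_opt(0, 10, 0).unwrap(),
            NaiveTime::from_hms_opt(0, 20, 0).unwrap(),
        )];
        assert_eq!(decode(&encode(&p)), p);
    }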
@@ -23,7 +23,7 @@ pub struct Schedule {
 }
 
 #[derive(Insertable)]
-#[table_name = "schedules"]
+#[diesel(table_name = crate::db::schema::schedules)]
 pub struct NewSchedule<'a> {
     pub uid: &'a EmgauwaUid,
     pub name: &'a str,
@@ -31,26 +31,27 @@ pub struct NewSchedule<'a> {
 }
 
 #[derive(Debug, Serialize, Deserialize, AsExpression, FromSqlRow, PartialEq, Clone)]
-#[sql_type = "Binary"]
-pub struct Periods(pub(crate) Vec<Period>);
+#[diesel(sql_type = Binary)]
+pub struct Periods(pub Vec<Period>);
 
 #[derive(Debug, Serialize, Identifiable, Queryable, Clone)]
+#[diesel(table_name = crate::db::schema::tags)]
 pub struct Tag {
     pub id: i32,
     pub tag: String,
 }
 
 #[derive(Insertable)]
-#[table_name = "tags"]
+#[diesel(table_name = crate::db::schema::tags)]
 pub struct NewTag<'a> {
     pub tag: &'a str,
 }
 
 #[derive(Queryable, Associations, Identifiable)]
-#[belongs_to(Relay)]
-#[belongs_to(Schedule)]
-#[belongs_to(Tag)]
-#[table_name = "junction_tag"]
+#[diesel(belongs_to(Relay))]
+#[diesel(belongs_to(Schedule))]
+#[diesel(belongs_to(Tag))]
+#[diesel(table_name = crate::db::schema::junction_tag)]
 pub struct JunctionTag {
     pub id: i32,
     pub tag_id: i32,
@@ -59,7 +60,7 @@ pub struct JunctionTag {
 }
 
 #[derive(Insertable)]
-#[table_name = "junction_tag"]
+#[diesel(table_name = crate::db::schema::junction_tag)]
 pub struct NewJunctionTag {
     pub tag_id: i32,
     pub relay_id: Option<i32>,
@@ -13,37 +13,37 @@ use crate::db::tag::{create_junction_tag, create_tag};
 use crate::db::{get_connection, schema};
 
 pub fn get_schedule_tags(schedule: &Schedule) -> Vec<String> {
-    let connection = get_connection();
+    let mut connection = get_connection();
     JunctionTag::belonging_to(schedule)
         .inner_join(schema::tags::dsl::tags)
         .select(schema::tags::tag)
-        .load::<String>(&connection)
+        .load::<String>(&mut connection)
         .expect("Error loading tags")
 }
 
 pub fn get_schedules() -> Vec<Schedule> {
-    let connection = get_connection();
+    let mut connection = get_connection();
     schedules
-        .load::<Schedule>(&connection)
+        .load::<Schedule>(&mut connection)
         .expect("Error loading schedules")
 }
 
 pub fn get_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<Schedule, DatabaseError> {
-    let connection = get_connection();
+    let mut connection = get_connection();
     let result = schedules
         .filter(schema::schedules::uid.eq(filter_uid))
-        .first::<Schedule>(&connection)
+        .first::<Schedule>(&mut connection)
         .or(Err(DatabaseError::NotFound))?;
 
     Ok(result)
 }
 
 pub fn get_schedules_by_tag(tag: &Tag) -> Vec<Schedule> {
-    let connection = get_connection();
+    let mut connection = get_connection();
     JunctionTag::belonging_to(tag)
         .inner_join(schedules)
         .select(schema::schedules::all_columns)
-        .load::<Schedule>(&connection)
+        .load::<Schedule>(&mut connection)
         .expect("Error loading tags")
 }
 
@@ -54,9 +54,9 @@ pub fn delete_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<(), DatabaseErro
         EmgauwaUid::Any(_) => Ok(filter_uid),
     }?;
 
-    let connection = get_connection();
+    let mut connection = get_connection();
     match diesel::delete(schedules.filter(schema::schedules::uid.eq(filter_uid)))
-        .execute(&connection)
+        .execute(&mut connection)
     {
         Ok(rows) => {
             if rows != 0 {
@@ -70,7 +70,7 @@ pub fn delete_schedule_by_uid(filter_uid: EmgauwaUid) -> Result<(), DatabaseErro
 }
 
 pub fn create_schedule(new_name: &str, new_periods: &Periods) -> Result<Schedule, DatabaseError> {
-    let connection = get_connection();
+    let mut connection = get_connection();
 
     let new_schedule = NewSchedule {
         uid: &EmgauwaUid::default(),
@@ -80,12 +80,12 @@ pub fn create_schedule(new_name: &str, new_periods: &Periods) -> Result<Schedule
 
     diesel::insert_into(schedules)
         .values(&new_schedule)
-        .execute(&connection)
+        .execute(&mut connection)
         .map_err(DatabaseError::InsertError)?;
 
     let result = schedules
         .find(sql("last_insert_rowid()"))
-        .get_result::<Schedule>(&connection)
+        .get_result::<Schedule>(&mut connection)
         .or(Err(DatabaseError::InsertGetError))?;
 
     Ok(result)
@@ -96,7 +96,7 @@ pub fn update_schedule(
     new_name: &str,
     new_periods: &Periods,
 ) -> Result<Schedule, DatabaseError> {
-    let connection = get_connection();
+    let mut connection = get_connection();
 
     let new_periods = match schedule.uid {
         EmgauwaUid::Off | EmgauwaUid::On => schedule.periods.borrow(),
@@ -108,21 +108,21 @@ pub fn update_schedule(
             schema::schedules::name.eq(new_name),
             schema::schedules::periods.eq(new_periods),
         ))
-        .execute(&connection)
+        .execute(&mut connection)
         .map_err(DatabaseError::UpdateError)?;
 
     get_schedule_by_uid(schedule.uid.clone())
 }
 
 pub fn set_schedule_tags(schedule: &Schedule, new_tags: &[String]) -> Result<(), DatabaseError> {
-    let connection = get_connection();
+    let mut connection = get_connection();
     diesel::delete(junction_tag.filter(schema::junction_tag::schedule_id.eq(schedule.id)))
-        .execute(&connection)
+        .execute(&mut connection)
         .or(Err(DatabaseError::DeleteError))?;
 
     let mut database_tags: Vec<Tag> = tags
         .filter(schema::tags::tag.eq_any(new_tags))
-        .load::<Tag>(&connection)
+        .load::<Tag>(&mut connection)
         .expect("Error loading tags");
 
     // create missing tags
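Every change in this hunk is the same mechanical Diesel 2 adjustment: connections are now borrowed mutably by every query helper. A minimal sketch of the new calling convention (the two-column table here is hypothetical, only to make the snippet stand on its own):

    use diesel::prelude::*;
    use diesel::sqlite::SqliteConnection;

    diesel::table! {
        schedules (id) {
            id -> Integer,
            name -> Nullable<Text>,
        }
    }

    // Diesel 1: .get_result(&connection)
    // Diesel 2: .get_result(&mut connection)
    fn count_schedules(connection: &mut SqliteConnection) -> i64 {
        use schedules::dsl::*;
        schedules
            .count()
            .get_result(connection)
            .expect("error counting schedules")
    }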
@@ -8,29 +8,29 @@ use crate::db::schema::tags::dsl::tags;
 use crate::db::{get_connection, schema};
 
 pub fn create_tag(new_tag: &str) -> Result<Tag, DatabaseError> {
-    let connection = get_connection();
+    let mut connection = get_connection();
 
     let new_tag = NewTag { tag: new_tag };
 
     diesel::insert_into(tags)
         .values(&new_tag)
-        .execute(&connection)
+        .execute(&mut connection)
         .map_err(DatabaseError::InsertError)?;
 
     let result = tags
         .find(sql("last_insert_rowid()"))
-        .get_result::<Tag>(&connection)
+        .get_result::<Tag>(&mut connection)
         .or(Err(DatabaseError::InsertGetError))?;
 
     Ok(result)
 }
 
 pub fn get_tag(target_tag: &str) -> Result<Tag, DatabaseError> {
-    let connection = get_connection();
+    let mut connection = get_connection();
 
     let result = tags
         .filter(schema::tags::tag.eq(target_tag))
-        .first::<Tag>(&connection)
+        .first::<Tag>(&mut connection)
         .or(Err(DatabaseError::NotFound))?;
 
     Ok(result)
@@ -41,7 +41,7 @@ pub fn create_junction_tag(
     target_relay: Option<&Relay>,
     target_schedule: Option<&Schedule>,
 ) -> Result<JunctionTag, DatabaseError> {
-    let connection = get_connection();
+    let mut connection = get_connection();
 
     let new_junction_tag = NewJunctionTag {
         relay_id: target_relay.map(|r| r.id),
@@ -51,12 +51,12 @@ pub fn create_junction_tag(
 
     diesel::insert_into(junction_tag)
         .values(&new_junction_tag)
-        .execute(&connection)
+        .execute(&mut connection)
         .map_err(DatabaseError::InsertError)?;
 
     let result = junction_tag
         .find(sql("last_insert_rowid()"))
-        .get_result::<JunctionTag>(&connection)
+        .get_result::<JunctionTag>(&mut connection)
         .or(Err(DatabaseError::InsertGetError))?;
 
     Ok(result)
@@ -28,7 +28,8 @@ pub async fn index() -> impl Responder {
 }
 
 #[get("/api/v1/schedules/tag/{tag}")]
-pub async fn tagged(web::Path((tag,)): web::Path<(String,)>) -> impl Responder {
+pub async fn tagged(path: web::Path<(String,)>) -> impl Responder {
+    let (tag,) = path.into_inner();
     let tag_db = get_tag(&tag);
     if tag_db.is_err() {
         return HttpResponse::from(tag_db.unwrap_err());
@@ -42,7 +43,8 @@ pub async fn tagged(web::Path((tag,)): web::Path<(String,)>) -> impl Responder {
 }
 
 #[get("/api/v1/schedules/{schedule_id}")]
-pub async fn show(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Responder {
+pub async fn show(path: web::Path<(String,)>) -> impl Responder {
+    let (schedule_uid,) = path.into_inner();
     let emgauwa_uid = EmgauwaUid::try_from(schedule_uid.as_str()).or(Err(HandlerError::BadUid));
 
     match emgauwa_uid {
@@ -108,9 +110,10 @@ pub async fn add_list(data: web::Json<Vec<RequestSchedule>>) -> impl Responder {
 
 #[put("/api/v1/schedules/{schedule_id}")]
 pub async fn update(
-    web::Path((schedule_uid,)): web::Path<(String,)>,
+    path: web::Path<(String,)>,
     data: web::Json<RequestSchedule>,
 ) -> impl Responder {
+    let (schedule_uid,) = path.into_inner();
     let emgauwa_uid = EmgauwaUid::try_from(schedule_uid.as_str()).or(Err(HandlerError::BadUid));
     if emgauwa_uid.is_err() {
         return HttpResponse::from(emgauwa_uid.unwrap_err());
@@ -138,7 +141,8 @@ pub async fn update(
 }
 
 #[delete("/api/v1/schedules/{schedule_id}")]
-pub async fn delete(web::Path((schedule_uid,)): web::Path<(String,)>) -> impl Responder {
+pub async fn delete(path: web::Path<(String,)>) -> impl Responder {
+    let (schedule_uid,) = path.into_inner();
     let emgauwa_uid = EmgauwaUid::try_from(schedule_uid.as_str()).or(Err(HandlerError::BadUid));
 
     match emgauwa_uid {
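actix-web 4 dropped destructuring of web::Path in handler signatures, which is why every handler above gains a path.into_inner() line. A sketch of the new shape (the handler body is illustrative):

    use actix_web::{get, web, Responder};

    #[get("/api/v1/schedules/{schedule_id}")]
    async fn show(path: web::Path<(String,)>) -> impl Responder {
        // actix-web 4: take the extractor whole, then unpack the tuple.
        let (schedule_uid,) = path.into_inner();
        format!("schedule {}", schedule_uid)
    }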
src/main.rs (35 changes)
@@ -1,40 +1,45 @@
 #[macro_use]
 extern crate diesel;
-#[macro_use]
 extern crate diesel_migrations;
-extern crate core;
 extern crate dotenv;
 
-use actix_web::middleware::normalize::TrailingSlash;
+use actix_web::middleware::TrailingSlash;
 use actix_web::{middleware, web, App, HttpServer};
-use env_logger::{Builder, Env};
-use wiringpi::pin::Value::High;
+use log::{trace, LevelFilter};
+use simple_logger::SimpleLogger;
+use std::fmt::format;
+use std::str::FromStr;
 
 mod db;
 mod handlers;
 mod return_models;
+mod settings;
 mod types;
 mod utils;
 
 #[actix_web::main]
 async fn main() -> std::io::Result<()> {
-    db::run_migrations();
+    settings::init();
+    let settings = settings::get();
 
-    Builder::from_env(Env::default().default_filter_or("info")).init();
+    let log_level: LevelFilter = log::LevelFilter::from_str(&settings.logging.level)
+        .unwrap_or_else(|_| panic!("Error parsing log level."));
+    trace!("Log level set to {:?}", log_level);
 
-    let pi = wiringpi::setup();
+    SimpleLogger::new()
+        .with_level(log_level)
+        .init()
+        .unwrap_or_else(|_| panic!("Error initializing logger."));
 
-    //Use WiringPi pin 0 as output
-    let pin = pi.output_pin(0);
-    pin.digital_write(High);
+    db::init(&settings.database);
 
     HttpServer::new(|| {
         App::new()
             .wrap(
                 middleware::DefaultHeaders::new()
-                    .header("Access-Control-Allow-Origin", "*")
-                    .header("Access-Control-Allow-Headers", "*")
-                    .header("Access-Control-Allow-Methods", "*"),
+                    .add(("Access-Control-Allow-Origin", "*"))
+                    .add(("Access-Control-Allow-Headers", "*"))
+                    .add(("Access-Control-Allow-Methods", "*")),
             )
             .wrap(middleware::Logger::default())
             .wrap(middleware::NormalizePath::new(TrailingSlash::Trim))
@@ -47,7 +52,7 @@ async fn main() -> std::io::Result<()> {
             .service(handlers::v1::schedules::update)
             .service(handlers::v1::schedules::delete)
     })
-    .bind("127.0.0.1:5000")?
+    .bind(format!("{}:{}", settings.host, settings.port))?
    .run()
    .await
 }
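The main.rs changes track two actix-web 4 API renames: TrailingSlash moved out of middleware::normalize, and DefaultHeaders takes (name, value) tuples via .add(...) instead of .header(...). A stripped-down sketch of the resulting server setup (address and middleware choices are illustrative):

    use actix_web::middleware::{DefaultHeaders, NormalizePath, TrailingSlash};
    use actix_web::{App, HttpServer};

    #[actix_web::main]
    async fn main() -> std::io::Result<()> {
        HttpServer::new(|| {
            App::new()
                // actix-web 4: .add((k, v)) replaces the old .header(k, v)
                .wrap(DefaultHeaders::new().add(("Access-Control-Allow-Origin", "*")))
                .wrap(NormalizePath::new(TrailingSlash::Trim))
        })
        .bind(("127.0.0.1", 5000))?
        .run()
        .await
    }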
src/settings.rs (new file, 66 lines)
@@ -0,0 +1,66 @@
+use config::Config;
+use lazy_static::lazy_static;
+use serde_derive::Deserialize;
+use std::sync::RwLock;
+
+#[derive(Clone, Debug, Deserialize)]
+#[serde(default)]
+#[allow(unused)]
+pub struct Logging {
+    pub level: String,
+    pub file: String,
+}
+
+#[derive(Clone, Debug, Deserialize)]
+#[serde(default)]
+#[allow(unused)]
+pub struct Settings {
+    pub database: String,
+    pub port: u16,
+    pub host: String,
+    pub logging: Logging,
+}
+
+impl Default for Settings {
+    fn default() -> Self {
+        Settings {
+            database: String::from("sqlite://emgauwa-core.sqlite"),
+            port: 5000,
+            host: String::from("127.0.0.1"),
+            logging: Logging::default(),
+        }
+    }
+}
+
+impl Default for Logging {
+    fn default() -> Self {
+        Logging {
+            level: String::from("info"),
+            file: String::from("stdout"),
+        }
+    }
+}
+
+lazy_static! {
+    static ref SETTINGS: RwLock<Settings> = RwLock::new(Settings::default());
+}
+
+pub fn init() {
+    let settings = Config::builder()
+        .add_source(config::File::with_name("emgauwa-core"))
+        .add_source(
+            config::Environment::with_prefix("EMGAUWA")
+                .prefix_separator("_")
+                .separator("__"),
+        )
+        .build()
+        .unwrap()
+        .try_deserialize::<Settings>()
+        .unwrap_or_else(|_| panic!("Error reading settings."));
+
+    *SETTINGS.write().unwrap() = settings;
+}
+
+pub fn get() -> Settings {
+    SETTINGS.read().unwrap().clone()
+}
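With the EMGAUWA prefix, "_" prefix separator, and "__" nesting separator configured above, environment variables override file values along nested paths. A usage sketch from inside the crate (it assumes emgauwa-core.toml exists in the working directory, since the file source is required):

    fn main() {
        // EMGAUWA_PORT=8080            -> settings.port
        // EMGAUWA_LOGGING__LEVEL=trace -> settings.logging.level
        std::env::set_var("EMGAUWA_LOGGING__LEVEL", "trace");

        settings::init();
        assert_eq!(settings::get().logging.level, "trace");
    }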
src/types.rs (118 changes)
@@ -1,124 +1,12 @@
-use std::convert::TryFrom;
-use std::fmt::{Debug, Formatter};
-use std::io::Write;
-use std::str::FromStr;
-
-use diesel::backend::Backend;
-use diesel::deserialize::FromSql;
-use diesel::serialize::{IsNull, Output, ToSql};
 use diesel::sql_types::Binary;
-use diesel::sqlite::Sqlite;
-use diesel::{deserialize, serialize};
-use serde::{Serialize, Serializer};
 use uuid::Uuid;
 
+pub mod emgauwa_uid;
+
 #[derive(AsExpression, FromSqlRow, PartialEq, Clone)]
-#[sql_type = "Binary"]
+#[diesel(sql_type = Binary)]
 pub enum EmgauwaUid {
     Off,
     On,
     Any(Uuid),
 }
-
-impl EmgauwaUid {
-    const OFF_STR: &'static str = "off";
-    const ON_STR: &'static str = "on";
-    const OFF_U8: u8 = 0;
-    const ON_U8: u8 = 1;
-    const OFF_U128: u128 = 0;
-    const ON_U128: u128 = 1;
-}
-
-impl Default for EmgauwaUid {
-    fn default() -> Self {
-        EmgauwaUid::Any(Uuid::new_v4())
-    }
-}
-
-impl Debug for EmgauwaUid {
-    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        match self {
-            EmgauwaUid::Off => EmgauwaUid::OFF_STR.fmt(f),
-            EmgauwaUid::On => EmgauwaUid::ON_STR.fmt(f),
-            EmgauwaUid::Any(value) => value.fmt(f),
-        }
-    }
-}
-
-impl ToSql<Binary, Sqlite> for EmgauwaUid {
-    fn to_sql<W: Write>(&self, out: &mut Output<W, Sqlite>) -> serialize::Result {
-        match self {
-            EmgauwaUid::Off => out.write_all(&[EmgauwaUid::OFF_U8])?,
-            EmgauwaUid::On => out.write_all(&[EmgauwaUid::ON_U8])?,
-            EmgauwaUid::Any(value) => out.write_all(value.as_bytes())?,
-        }
-        Ok(IsNull::No)
-    }
-}
-
-impl FromSql<Binary, Sqlite> for EmgauwaUid {
-    fn from_sql(bytes: Option<&<Sqlite as Backend>::RawValue>) -> deserialize::Result<Self> {
-        match bytes {
-            None => Ok(EmgauwaUid::default()),
-            Some(value) => match value.read_blob() {
-                [EmgauwaUid::OFF_U8] => Ok(EmgauwaUid::Off),
-                [EmgauwaUid::ON_U8] => Ok(EmgauwaUid::On),
-                value_bytes => Ok(EmgauwaUid::Any(Uuid::from_slice(value_bytes).unwrap())),
-            },
-        }
-    }
-}
-
-impl Serialize for EmgauwaUid {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        String::from(self).serialize(serializer)
-    }
-}
-
-impl From<Uuid> for EmgauwaUid {
-    fn from(uid: Uuid) -> EmgauwaUid {
-        match uid.as_u128() {
-            EmgauwaUid::OFF_U128 => EmgauwaUid::Off,
-            EmgauwaUid::ON_U128 => EmgauwaUid::On,
-            _ => EmgauwaUid::Any(uid),
-        }
-    }
-}
-
-impl TryFrom<&str> for EmgauwaUid {
-    type Error = uuid::Error;
-
-    fn try_from(value: &str) -> Result<Self, Self::Error> {
-        match value {
-            EmgauwaUid::OFF_STR => Ok(EmgauwaUid::Off),
-            EmgauwaUid::ON_STR => Ok(EmgauwaUid::On),
-            any => match Uuid::from_str(any) {
-                Ok(uuid) => Ok(EmgauwaUid::Any(uuid)),
-                Err(err) => Err(err),
-            },
-        }
-    }
-}
-
-impl From<&EmgauwaUid> for Uuid {
-    fn from(emgauwa_uid: &EmgauwaUid) -> Uuid {
-        match emgauwa_uid {
-            EmgauwaUid::Off => uuid::Uuid::from_u128(EmgauwaUid::OFF_U128),
-            EmgauwaUid::On => uuid::Uuid::from_u128(EmgauwaUid::ON_U128),
-            EmgauwaUid::Any(value) => *value,
-        }
-    }
-}
-
-impl From<&EmgauwaUid> for String {
-    fn from(emgauwa_uid: &EmgauwaUid) -> String {
-        match emgauwa_uid {
-            EmgauwaUid::Off => String::from(EmgauwaUid::OFF_STR),
-            EmgauwaUid::On => String::from(EmgauwaUid::ON_STR),
-            EmgauwaUid::Any(value) => value.to_hyphenated().to_string(),
-        }
-    }
-}
src/types/emgauwa_uid.rs (new file, 122 lines)
@@ -0,0 +1,122 @@
+use std::convert::TryFrom;
+use std::fmt::{Debug, Formatter};
+use std::str::FromStr;
+
+use crate::types::EmgauwaUid;
+use diesel::backend::Backend;
+use diesel::deserialize::FromSql;
+use diesel::serialize::{IsNull, Output, ToSql};
+use diesel::sql_types::Binary;
+use diesel::{deserialize, serialize};
+use serde::{Serialize, Serializer};
+use uuid::Uuid;
+
+impl EmgauwaUid {
+    const OFF_STR: &'static str = "off";
+    const ON_STR: &'static str = "on";
+    const OFF_U8: u8 = 0;
+    const ON_U8: u8 = 1;
+    const OFF_U128: u128 = 0;
+    const ON_U128: u128 = 1;
+}
+
+impl Default for EmgauwaUid {
+    fn default() -> Self {
+        EmgauwaUid::Any(Uuid::new_v4())
+    }
+}
+
+impl Debug for EmgauwaUid {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        match self {
+            EmgauwaUid::Off => EmgauwaUid::OFF_STR.fmt(f),
+            EmgauwaUid::On => EmgauwaUid::ON_STR.fmt(f),
+            EmgauwaUid::Any(value) => value.fmt(f),
+        }
+    }
+}
+
+impl<DB> ToSql<Binary, DB> for EmgauwaUid
+where
+    DB: Backend,
+    [u8]: ToSql<Binary, DB>,
+{
+    fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, DB>) -> serialize::Result {
+        match self {
+            EmgauwaUid::Off => [EmgauwaUid::OFF_U8].to_sql(out)?,
+            EmgauwaUid::On => [EmgauwaUid::ON_U8].to_sql(out)?,
+            EmgauwaUid::Any(value) => value.as_bytes().to_sql(out)?,
+        };
+        Ok(IsNull::No)
+    }
+}
+
+impl<DB> FromSql<Binary, DB> for EmgauwaUid
+where
+    DB: Backend,
+    Vec<u8>: FromSql<Binary, DB>,
+{
+    fn from_sql(bytes: DB::RawValue<'_>) -> deserialize::Result<Self> {
+        let blob: Vec<u8> = FromSql::<Binary, DB>::from_sql(bytes)?;
+
+        match blob.as_slice() {
+            [EmgauwaUid::OFF_U8] => Ok(EmgauwaUid::Off),
+            [EmgauwaUid::ON_U8] => Ok(EmgauwaUid::On),
+            value_bytes => Ok(EmgauwaUid::Any(Uuid::from_slice(value_bytes).unwrap())),
+        }
+    }
+}
+
+impl Serialize for EmgauwaUid {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        String::from(self).serialize(serializer)
+    }
+}
+
+impl From<Uuid> for EmgauwaUid {
+    fn from(uid: Uuid) -> EmgauwaUid {
+        match uid.as_u128() {
+            EmgauwaUid::OFF_U128 => EmgauwaUid::Off,
+            EmgauwaUid::ON_U128 => EmgauwaUid::On,
+            _ => EmgauwaUid::Any(uid),
+        }
+    }
+}
+
+impl TryFrom<&str> for EmgauwaUid {
+    type Error = uuid::Error;
+
+    fn try_from(value: &str) -> Result<Self, Self::Error> {
+        match value {
+            EmgauwaUid::OFF_STR => Ok(EmgauwaUid::Off),
+            EmgauwaUid::ON_STR => Ok(EmgauwaUid::On),
+            any => match Uuid::from_str(any) {
+                Ok(uuid) => Ok(EmgauwaUid::Any(uuid)),
+                Err(err) => Err(err),
+            },
+        }
+    }
+}
+
+impl From<&EmgauwaUid> for Uuid {
+    fn from(emgauwa_uid: &EmgauwaUid) -> Uuid {
+        match emgauwa_uid {
+            EmgauwaUid::Off => uuid::Uuid::from_u128(EmgauwaUid::OFF_U128),
+            EmgauwaUid::On => uuid::Uuid::from_u128(EmgauwaUid::ON_U128),
+            EmgauwaUid::Any(value) => *value,
+        }
+    }
+}
+
+impl From<&EmgauwaUid> for String {
+    fn from(emgauwa_uid: &EmgauwaUid) -> String {
+        match emgauwa_uid {
+            EmgauwaUid::Off => String::from(EmgauwaUid::OFF_STR),
+            EmgauwaUid::On => String::from(EmgauwaUid::ON_STR),
+            EmgauwaUid::Any(value) => value.as_hyphenated().to_string(),
+        }
+    }
+}
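Taken together, the conversions above give the UID type a stable textual form: the reserved schedules round-trip as "off" and "on", everything else as a hyphenated UUID. A quick illustrative check from inside the crate (the literal UUID is arbitrary):

    use std::convert::TryFrom;

    use crate::types::EmgauwaUid;

    fn demo() {
        // Reserved names map to the special variants...
        assert!(matches!(EmgauwaUid::try_from("off"), Ok(EmgauwaUid::Off)));
        assert!(matches!(EmgauwaUid::try_from("on"), Ok(EmgauwaUid::On)));
        // ...anything else must parse as a UUID.
        assert!(EmgauwaUid::try_from("not-a-uuid").is_err());

        let uid = EmgauwaUid::try_from("7f000001-0000-4000-8000-000000000001").unwrap();
        assert_eq!(String::from(&uid), "7f000001-0000-4000-8000-000000000001");
    }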
@@ -1,65 +0,0 @@
-[controller]
-name = new emgauwa device
-
-: 4422 for testing; 4421 for dev-env; 4420 for testing-env; 4419 for prod-env
-discovery-port = 4422
-: 1886 for testing; 1885 for dev-env; 1884 for testing-env; 1883 for prod-env
-mqtt-port = 1886
-mqtt-host = localhost
-
-relay-count = 10
-database = controller.sqlite
-log-level = debug
-log-file = stdout
-
-[relay-0]
-driver = piface
-pin = 0
-inverted = 0
-
-[relay-1]
-driver = piface
-pin = 1
-inverted = 0
-
-[relay-2]
-driver = gpio
-pin = 5
-inverted = 1
-
-[relay-3]
-driver = gpio
-pin = 4
-inverted = 1
-
-[relay-4]
-driver = gpio
-pin = 3
-inverted = 1
-
-[relay-5]
-driver = gpio
-pin = 2
-inverted = 1
-
-[relay-6]
-driver = gpio
-pin = 1
-inverted = 1
-pulse-duration = 3
-
-[relay-7]
-driver = gpio
-pin = 0
-inverted = 1
-pulse-duration = 3
-
-[relay-8]
-driver = gpio
-pin = 16
-inverted = 1
-
-[relay-9]
-driver = gpio
-pin = 15
-inverted = 1
@@ -1,16 +0,0 @@
-[core]
-server-port = 5000
-database = core.sqlite
-content-dir = /usr/share/webapps/emgauwa
-not-found-file = 404.html
-not-found-file-mime = text/html
-not-found-content = 404 - NOT FOUND
-not-found-content-type = text/plain
-
-: 4422 for testing; 4421 for dev-env; 4420 for testing-env; 4419 for prod-env
-discovery-port = 4422
-: 1886 for testing; 1885 for dev-env; 1884 for testing-env; 1883 for prod-env
-mqtt-port = 1886
-
-log-level = debug
-log-file = stdout
@@ -1,67 +0,0 @@
-#!/usr/bin/env sh
-
-source_dir=$PWD/tests
-working_dir=$source_dir/testing_latest
-working_bak=$source_dir/testing_bak
-
-rm -rf "$working_bak"
-[ -d "$working_dir" ] && mv "$working_dir" "$working_bak"
-
-mkdir -p "$working_dir"
-
-cp "${1:-"target/debug/emgauwa-core"}" "$working_dir/core"
-
-cd "$working_dir" || exit
-
-#target_branch=$(git rev-parse --abbrev-ref HEAD)
-
-#if [ -z "$EMGAUWA_CONTROLLER_EXE" ]
-#then
-#  git clone --quiet ssh://git@git.serguzim.me:3022/emgauwa/controller.git controller || exit
-#  cd ./controller || exit
-#
-#  git checkout dev >/dev/null 2>&1
-#  git checkout "$target_branch" >/dev/null 2>&1
-#  git checkout "$2" >/dev/null 2>&1
-#
-#  echo "Building controller on branch $(git rev-parse --abbrev-ref HEAD)"
-#  mkdir build
-#  cd build || exit
-#
-#  cmake -DWIRING_PI_DEBUG=on .. >/dev/null
-#  make >/dev/null
-#  EMGAUWA_CONTROLLER_EXE=./controller
-#fi
-
-#echo "Emgauwa controller: $($EMGAUWA_CONTROLLER_EXE --version)"
-
-#$EMGAUWA_CONTROLLER_EXE start -c "$source_dir/controller.testing.ini" >"$working_dir/controller.log" 2>&1 &
-#controller_id=$!
-
-cd "$working_dir" || exit
-
-EMGAUWA_CORE_EXE="$working_dir/core"
-cp "$source_dir/core.testing.ini" "$working_dir/core.ini"
-
-$EMGAUWA_CORE_EXE start >>"$working_dir/core.log" 2>&1 &
-core_id=$!
-
-# wait for start
-if [ -x "$(command -v wait-for-it)" ]
-then
-  wait-for-it localhost:5000 -t 15
-else
-  echo "waiting 5 seconds for server"
-  sleep 5;
-fi
-
-export PYTHONPATH=$PYTHONPATH:$source_dir/tavern_utils
-tavern-ci --disable-warnings "$source_dir/tavern_tests"
-test_result=$?
-
-#kill $controller_id
-kill $core_id
-
-exit $test_result
@@ -1,23 +0,0 @@
-test_name: "[get_all] Test basic get all requests"
-
-stages:
-  - name: "[get_all] get all schedules"
-    request:
-      url: "http://localhost:5000/api/v1/schedules/"
-      method: GET
-    response:
-      status_code: 200
-
-  - name: "[get_all] get all relays"
-    request:
-      url: "http://localhost:5000/api/v1/relays/"
-      method: GET
-    response:
-      status_code: 200
-
-  - name: "[get_all] get all controllers"
-    request:
-      url: "http://localhost:5000/api/v1/controllers/"
-      method: GET
-    response:
-      status_code: 200
@@ -1,116 +0,0 @@
-test_name: Test basic controller functions
-
-stages:
-  - name: "[controllers_basic] discover controllers"
-    request:
-      method: POST
-      url: "http://localhost:5000/api/v1/controllers/discover/"
-    response:
-      status_code: 200
-      verify_response_with:
-        function: validate_controller:multiple
-      save:
-        json:
-          returned_name: "[0].name"
-          returned_id: "[0].id"
-          returned_ip: "[0].ip"
-
-  - name: "[controllers_basic] get controller, check name"
-    request:
-      method: GET
-      url: "http://localhost:5000/api/v1/controllers/{returned_id}"
-    response:
-      status_code: 200
-      verify_response_with:
-        function: validate_controller:single
-        function: validate_controller:check_id
-        extra_kwargs:
-          id: "{returned_id}"
-        function: validate_controller:check_name
-        extra_kwargs:
-          name: "{returned_name}"
-
-  - name: "[controllers_basic] put controller, check name"
-    request:
-      method: PUT
-      url: "http://localhost:5000/api/v1/controllers/{returned_id}"
-      json:
-        name: "renamed_controller"
-    response:
-      status_code: 200
-      verify_response_with:
-        function: validate_controller:single
-        function: validate_controller:check_id
-        extra_kwargs:
-          id: "{returned_id}"
-        function: validate_controller:check_name
-        extra_kwargs:
-          name: "{tavern.request_vars.json.name}"
-      save:
-        json:
-          changed_name: "name"
-
-  #- name: "[controllers_basic] put controller, check name and ip"
-  #  request:
-  #    method: PUT
-  #    url: "http://localhost:5000/api/v1/controllers/{returned_id}"
-  #    json:
-  #      ip: "203.0.113.17"
-  #  response:
-  #    status_code: 200
-  #    verify_response_with:
-  #      function: validate_controller:single
-  #      function: validate_controller:check_id
-  #      extra_kwargs:
-  #        id: "{returned_id}"
-  #      function: validate_controller:check_ip
-  #      extra_kwargs:
-  #        ip: "{tavern.request_vars.json.ip}"
-  #    save:
-  #      json:
-  #        changed_ip: "ip"
-
-  - name: "[controllers_basic] delete controller"
-    request:
-      method: DELETE
-      url: "http://localhost:5000/api/v1/controllers/{returned_id}"
-    response:
-      status_code: 200
-
-  - name: "[controllers_basic] get controller, expect 404"
-    request:
-      method: GET
-      url: "http://localhost:5000/api/v1/controllers/{returned_id}"
-    response:
-      status_code: 404
-
-  - name: "[controllers_basic] discover controllers again"
-    request:
-      method: POST
-      url: "http://localhost:5000/api/v1/controllers/discover/"
-    response:
-      status_code: 200
-      verify_response_with:
-        function: validate_controller:multiple
-        function: validate_controller:find
-        extra_kwargs:
-          id: "{returned_id}"
-          name: "{changed_name}"
-
-  - name: "[controllers_basic] get controller again, check name"
-    request:
-      method: GET
-      url: "http://localhost:5000/api/v1/controllers/{returned_id}"
-    response:
-      status_code: 200
-      verify_response_with:
-        function: validate_controller:single
-        function: validate_controller:check_id
-        extra_kwargs:
-          id: "{returned_id}"
-        function: validate_controller:check_name
-        extra_kwargs:
-          name: "{changed_name}"
-        function: validate_controller:check_ip
-        extra_kwargs:
-          ip: "{returned_ip}"
@@ -1,42 +0,0 @@
-test_name: Test basic controller relays functions
-
-stages:
-  - name: "[controller_relays_basic] get controllers"
-    request:
-      method: GET
-      url: "http://localhost:5000/api/v1/controllers/"
-    response:
-      status_code: 200
-      verify_response_with:
-        function: validate_controller:multiple
-      save:
-        json:
-          returned_id: "[0].id"
-          returned_relay_count: "[0].relay_count"
-
-  - name: "[controller_relays_basic] get controller relays, check length"
-    request:
-      method: GET
-      url: "http://localhost:5000/api/v1/controllers/{returned_id}/relays"
-    response:
-      status_code: 200
-      verify_response_with:
-        function: validate_relay:multiple
-        function: validate_relay:relay_count
-        extra_kwargs:
-          relay_count: !int "{returned_relay_count:d}"
-
-  - name: "[controller_relays_basic] get controller relays, check length"
-    request:
-      method: GET
-      url: "http://localhost:5000/api/v1/controllers/{returned_id}/relays/5"
-    response:
-      status_code: 200
-      verify_response_with:
-        function: validate_relay:single
-        function: validate_relay:check_controller_id
-        extra_kwargs:
-          name: "{returned_id}"
-        function: validate_relay:check_number
-        extra_kwargs:
-          number: 5
@@ -1,99 +0,0 @@

test_name: Test bad controller functions

stages:
  - name: "[controllers_bad] get controller with bad id"
    request:
      method: GET
      url: "http://localhost:5000/api/v1/controllers/this_id_is_invalid"
    response:
      status_code: 400

  - name: "[controllers_bad] put controller with bad id"
    request:
      method: PUT
      url: "http://localhost:5000/api/v1/controllers/this_id_is_invalid"
      json:
        name: "unknown_controller"
    response:
      status_code: 400

  - name: "[controllers_bad] delete controller with bad id"
    request:
      method: DELETE
      url: "http://localhost:5000/api/v1/controllers/this_id_is_invalid"
      json:
        name: "unknown_controller"
    response:
      status_code: 400

  - name: "[controllers_bad] get controller with unknown id"
    request:
      method: GET
      url: "http://localhost:5000/api/v1/controllers/00000000-0000-0000-0000-000000000000"
    response:
      status_code: 404

  - name: "[controllers_bad] put controller with unknown id"
    request:
      method: PUT
      url: "http://localhost:5000/api/v1/controllers/00000000-0000-0000-0000-000000000000"
      json:
        name: "unknown_controller"
    response:
      status_code: 404

  - name: "[controllers_bad] delete controller with unknown id"
    request:
      method: DELETE
      url: "http://localhost:5000/api/v1/controllers/00000000-0000-0000-0000-000000000000"
      json:
        name: "unknown_controller"
    response:
      status_code: 404

  - name: "[controllers_bad] get controllers to save valid id"
    request:
      method: GET
      url: "http://localhost:5000/api/v1/controllers/"
    response:
      status_code: 200
      verify_response_with:
        - function: validate_controller:multiple
      save:
        json:
          returned_id: "[0].id"

  - name: "[controllers_bad] put controller with bad body (invalid name)"
    request:
      method: PUT
      url: "http://localhost:5000/api/v1/controllers/{returned_id}"
      json:
        name: NULL
    response:
      status_code: 400

  - name: "[controllers_bad] put controller with bad body (invalid ip)"
    request:
      method: PUT
      url: "http://localhost:5000/api/v1/controllers/{returned_id}"
      json:
        ip: 123
    response:
      status_code: 400

  - name: "[controllers_bad] put controller with bad body (invalid IPv4)"
    request:
      method: PUT
      url: "http://localhost:5000/api/v1/controllers/{returned_id}"
      json:
        ip: "10.0.0.300"
    response:
      status_code: 400

  - name: "[controllers_bad] put controller with bad body (no json)"
    request:
      method: PUT
      url: "http://localhost:5000/api/v1/controllers/{returned_id}"
      data: "<b>not json</b><i>but html</i>"
    response:
      status_code: 400
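The split encoded by this file is that a malformed id fails validation with 400 before any lookup happens, while a well-formed but unknown UUID reaches the database and misses with 404. A compact sketch of the same matrix in plain Python, assuming the same local instance:

    import requests

    BASE = "http://localhost:5000/api/v1/controllers"

    # Malformed ids are rejected up front (400); syntactically valid but
    # unknown UUIDs are looked up and miss (404).
    cases = [
        ("this_id_is_invalid", 400),
        ("00000000-0000-0000-0000-000000000000", 404),
    ]
    for controller_id, expected in cases:
        for method in ("get", "put", "delete"):
            body = {"name": "unknown_controller"} if method != "get" else None
            response = requests.request(method, f"{BASE}/{controller_id}", json=body)
            assert response.status_code == expected, (method, controller_id)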
@@ -1,89 +0,0 @@

test_name: Test basic schedule requests

stages:
  - name: "[schedules_basic] Make sure we get any response"
    request:
      method: GET
      url: "http://localhost:5000/api/v1/schedules/"
    response:
      status_code: 200
      verify_response_with:
        - function: validate_schedule:multiple

  - name: "[schedules_basic] post schedule with no periods, expect it to be echoed back"
    request:
      method: POST
      url: "http://localhost:5000/api/v1/schedules/"
      json:
        name: "same as off"
        periods: []
        tags: []
    response:
      status_code: 201
      verify_response_with:
        - function: validate_schedule:single
        - function: validate_schedule:check_name
          extra_kwargs:
            name: "{tavern.request_vars.json.name}"
        - function: validate_schedule:check_periods
          extra_kwargs:
            periods: "{tavern.request_vars.json.periods}"

  - name: "[schedules_basic] post schedule, expect it to be echoed back"
    request:
      method: POST
      url: "http://localhost:5000/api/v1/schedules/"
      json:
        name: "hello"
        periods:
          - start: "00:10"
            end: "00:20"
          - start: "00:30"
            end: "00:40"
          - start: "00:50"
            end: "01:00"
        tags: []
    response:
      status_code: 201
      verify_response_with:
        - function: validate_schedule:single
        - function: validate_schedule:check_name
          extra_kwargs:
            name: "{tavern.request_vars.json.name}"
        - function: validate_schedule:check_periods
          extra_kwargs:
            periods: "{tavern.request_vars.json.periods}"
      save:
        json:
          returned_name: "name"
          returned_id: "id"
          returned_periods: "periods"

  - name: "[schedules_basic] get schedule, check name and some periods"
    request:
      method: GET
      url: "http://localhost:5000/api/v1/schedules/{returned_id}"
    response:
      status_code: 200
      verify_response_with:
        - function: validate_schedule:single
        - function: validate_schedule:check_name
          extra_kwargs:
            name: "{returned_name}"
        - function: validate_schedule:check_periods
          extra_kwargs:
            periods: "{returned_periods}"

  - name: "[schedules_basic] delete schedule"
    request:
      method: DELETE
      url: "http://localhost:5000/api/v1/schedules/{returned_id}"
    response:
      status_code: 200

  - name: "[schedules_basic] get deleted schedule, expect 404"
    request:
      method: GET
      url: "http://localhost:5000/api/v1/schedules/{returned_id}"
    response:
      status_code: 404
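Stripped of the Tavern wrapping, the create/read/delete cycle above comes down to the following sketch against the same local instance (the length check stands in for the validators' order-insensitive period comparison):

    import requests

    BASE = "http://localhost:5000/api/v1/schedules/"

    payload = {
        "name": "hello",
        "periods": [{"start": "00:10", "end": "00:20"}],
        "tags": [],
    }

    # Create the schedule and expect it to be echoed back with a generated id.
    created = requests.post(BASE, json=payload)
    assert created.status_code == 201
    schedule = created.json()
    assert schedule["name"] == payload["name"]
    assert len(schedule["periods"]) == len(payload["periods"])

    # The schedule is then retrievable and deletable under that id, and a
    # second GET after the DELETE misses.
    assert requests.get(f"{BASE}{schedule['id']}").status_code == 200
    assert requests.delete(f"{BASE}{schedule['id']}").status_code == 200
    assert requests.get(f"{BASE}{schedule['id']}").status_code == 404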
@@ -1,74 +0,0 @@

test_name: Test protected schedules requests

stages:
  - name: "[schedules_protected] delete protected off schedule; expect forbidden/fail"
    request:
      method: DELETE
      url: "http://localhost:5000/api/v1/schedules/off"
    response:
      status_code: 403

  - name: "[schedules_protected] get protected off schedule"
    request:
      method: GET
      url: "http://localhost:5000/api/v1/schedules/off"
    response:
      status_code: 200
      verify_response_with:
        - function: validate_schedule:single
        - function: validate_schedule:compare_off

  - name: "[schedules_protected] overwrite protected off schedule"
    request:
      method: PUT
      url: "http://localhost:5000/api/v1/schedules/off"
      json:
        name: "turned_off"
        periods:
          - start: "00:10"
            end: "00:20"
        tags: []
    response:
      status_code: 200
      verify_response_with:
        - function: validate_schedule:single
        - function: validate_schedule:compare_off
        - function: validate_schedule:check_name
          extra_kwargs:
            name: "{tavern.request_vars.json.name}"

  - name: "[schedules_protected] delete protected on schedule; expect forbidden/fail"
    request:
      method: DELETE
      url: "http://localhost:5000/api/v1/schedules/on"
    response:
      status_code: 403

  - name: "[schedules_protected] get protected on schedule"
    request:
      method: GET
      url: "http://localhost:5000/api/v1/schedules/on"
    response:
      status_code: 200
      verify_response_with:
        - function: validate_schedule:single
        - function: validate_schedule:compare_on

  - name: "[schedules_protected] overwrite protected on schedule"
    request:
      method: PUT
      url: "http://localhost:5000/api/v1/schedules/on"
      json:
        name: "turned_on"
        periods:
          - start: "16:10"
            end: "17:20"
        tags: []
    response:
      status_code: 200
      verify_response_with:
        - function: validate_schedule:single
        - function: validate_schedule:compare_on
        - function: validate_schedule:check_name
          extra_kwargs:
            name: "{tavern.request_vars.json.name}"
@@ -1,169 +0,0 @@

test_name: Test bad schedule requests

stages:
  - name: "[schedules_bad] get schedule with bad id"
    request:
      method: GET
      url: "http://localhost:5000/api/v1/schedules/this_id_is_invalid"
    response:
      status_code: 400

  - name: "[schedules_bad] post schedule with bad body (no json)"
    request:
      method: POST
      url: "http://localhost:5000/api/v1/schedules/"
      data: "<b>not json</b><i>but html</i>"
    response:
      status_code: 400

  - name: "[schedules_bad] post schedule with bad body (no name)"
    request:
      method: POST
      url: "http://localhost:5000/api/v1/schedules/"
      json:
        periods:
          - start: "00:10"
            end: "00:20"
          - start: "00:30"
            end: "00:40"
          - start: "00:50"
            end: "01:00"
        tags: []
    response:
      status_code: 400

  - name: "[schedules_bad] post schedule with bad body (name as number)"
    request:
      method: POST
      url: "http://localhost:5000/api/v1/schedules/"
      json:
        name: 42
        periods:
          - start: "00:10"
            end: "00:20"
          - start: "00:30"
            end: "00:40"
          - start: "00:50"
            end: "01:00"
        tags: []
    response:
      status_code: 400

  - name: "[schedules_bad] post schedule with bad period (no start)"
    request:
      method: POST
      url: "http://localhost:5000/api/v1/schedules/"
      json:
        name: "i am invalid"
        periods:
          - end: "00:20"
          - start: "00:30"
            end: "00:40"
        tags: []
    response:
      status_code: 400

  - name: "[schedules_bad] post schedule with bad period (no end)"
    request:
      method: POST
      url: "http://localhost:5000/api/v1/schedules/"
      json:
        name: "i am invalid"
        periods:
          - start: "00:20"
          - start: "00:30"
            end: "00:40"
        tags: []
    response:
      status_code: 400

  - name: "[schedules_bad] post schedule with bad period (invalid start)"
    request:
      method: POST
      url: "http://localhost:5000/api/v1/schedules/"
      json:
        name: "i am invalid"
        periods:
          - start: "hello"
            end: "00:20"
          - start: "00:30"
            end: "00:40"
        tags: []
    response:
      status_code: 400

  - name: "[schedules_bad] post schedule with bad period (invalid end)"
    request:
      method: POST
      url: "http://localhost:5000/api/v1/schedules/"
      json:
        name: "i am invalid"
        periods:
          - start: "12:10"
            end: 1215
          - start: "00:30"
            end: "00:40"
        tags: []
    response:
      status_code: 400

  - name: "[schedules_bad] post schedule with bad period (invalid end 2)"
    request:
      method: POST
      url: "http://localhost:5000/api/v1/schedules/"
      json:
        name: "i am invalid"
        periods:
          - start: "12:10"
            end: "25:90"
          - start: "00:30"
            end: "00:40"
        tags: []
    response:
      status_code: 400

  - name: "[schedules_bad] post schedule with bad periods (invalid list)"
    request:
      method: POST
      url: "http://localhost:5000/api/v1/schedules/"
      json:
        name: "i am invalid"
        periods: "not a list"
        tags: []
    response:
      status_code: 400

  - name: "[schedules_bad] post schedule with bad tags (one invalid)"
    request:
      method: POST
      url: "http://localhost:5000/api/v1/schedules/"
      json:
        name: "hello"
        periods:
          - start: "00:10"
            end: "00:20"
          - start: "00:30"
            end: "00:40"
          - start: "00:50"
            end: "01:00"
        tags:
          - "valid_tag"
          - 123
    response:
      status_code: 400

  - name: "[schedules_bad] post schedule without tags"
    request:
      method: POST
      url: "http://localhost:5000/api/v1/schedules/"
      json:
        name: "hello"
        periods:
          - start: "00:10"
            end: "00:20"
          - start: "00:30"
            end: "00:40"
          - start: "00:50"
            end: "01:00"
    response:
      status_code: 400
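All of the invalid-period stages above hinge on start and end parsing as 24-hour "HH:MM" strings. A hypothetical validator in the same spirit (not the server's actual implementation) shows why each payload is rejected:

    from datetime import datetime

    def is_valid_period_time(value):
        """Accept only 24-hour "HH:MM" strings, e.g. "00:10" or "17:20"."""
        if not isinstance(value, str):
            return False  # e.g. the integer 1215 in the "invalid end" stage
        try:
            datetime.strptime(value, "%H:%M")
            return True
        except ValueError:
            return False  # e.g. "hello" or the out-of-range "25:90"

    assert is_valid_period_time("00:10")
    assert not is_valid_period_time(1215)
    assert not is_valid_period_time("25:90")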
@@ -1,108 +0,0 @@

test_name: "[tags] Test tagging of schedules and relays"

stages:
  - name: "[tags] post schedule, expect it to be echoed back by tag"
    request:
      method: POST
      url: "http://localhost:5000/api/v1/schedules/"
      json:
        name: "test tagging schedule"
        periods:
          - start: "00:50"
            end: "01:00"
        tags:
          - "test_tag_1"
    response:
      status_code: 201
      verify_response_with:
        - function: validate_schedule:single
        - function: validate_schedule:check_name
          extra_kwargs:
            name: "{tavern.request_vars.json.name}"
        - function: validate_schedule:check_periods
          extra_kwargs:
            periods: "{tavern.request_vars.json.periods}"
        - function: validate_schedule:check_tag
          extra_kwargs:
            tag: "{tavern.request_vars.json.tags[0]}"
      save:
        json:
          returned_name: "name"
          returned_id: "id"
          returned_periods: "periods"

  - name: "[tags] get schedule, check name and some periods"
    request:
      method: GET
      url: "http://localhost:5000/api/v1/schedules/tag/test_tag_1"
    response:
      status_code: 200
      verify_response_with:
        - function: validate_schedule:multiple
        - function: validate_schedule:find
          extra_kwargs:
            id: "{returned_id}"
            name: "{returned_name}"
            periods: "{returned_periods}"

  - name: "[tags] get controllers"
    request:
      method: GET
      url: "http://localhost:5000/api/v1/controllers/"
    response:
      status_code: 200
      verify_response_with:
        - function: validate_controller:multiple
      save:
        json:
          returned_id: "[0].id"

  - name: "[tags] set relay tag"
    request:
      method: PUT
      url: "http://localhost:5000/api/v1/controllers/{returned_id}/relays/3"
      json:
        tags:
          - "test_tag_1"
    response:
      status_code: 200
      verify_response_with:
        - function: validate_relay:single
        - function: validate_relay:check_controller_id
          extra_kwargs:
            controller_id: "{returned_id}"
        - function: validate_relay:check_number
          extra_kwargs:
            number: 3
        - function: validate_relay:check_tag
          extra_kwargs:
            tag: "{tavern.request_vars.json.tags[0]}"
      save:
        json:
          returned_name: "name"
          returned_number: "number"
          returned_tag: "tags[0]"

  - name: "[tags] get relay, check name and number"
    request:
      method: GET
      url: "http://localhost:5000/api/v1/relays/tag/{returned_tag}"
    response:
      status_code: 200
      verify_response_with:
        - function: validate_relay:multiple
        - function: validate_relay:find
          extra_kwargs:
            name: "{returned_name}"
            number: !int "{returned_number:d}"
            controller_id: "{returned_id}"
            tag: "{returned_tag}"

  - name: "[tags] get tags"
    request:
      method: GET
      url: "http://localhost:5000/api/v1/tags/"
    response:
      status_code: 200
      verify_response_with:
        - function: validate_tag:multiple
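End to end, the relay-tagging flow above is: tag a relay, read it back through the tag index, and see the tag in the global listing. A sketch against the same local instance:

    import requests

    BASE = "http://localhost:5000/api/v1"

    # Tag relay number 3 of the first controller, then look it up by that tag.
    controller_id = requests.get(f"{BASE}/controllers/").json()[0]["id"]
    relay = requests.put(f"{BASE}/controllers/{controller_id}/relays/3",
                         json={"tags": ["test_tag_1"]}).json()
    assert "test_tag_1" in relay["tags"]

    tagged = requests.get(f"{BASE}/relays/tag/test_tag_1").json()
    assert any(r["controller_id"] == controller_id and r["number"] == 3
               for r in tagged)

    # The tag also shows up in the global tag listing.
    assert "test_tag_1" in requests.get(f"{BASE}/tags/").json()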
@@ -1,45 +0,0 @@

import validate_relay


def _verify_single(controller):
    assert isinstance(controller.get("id"), str), "controller id is not a string"
    assert isinstance(controller.get("name"), str), "controller name is not a string"
    assert isinstance(controller.get("relay_count"), int), "controller relay_count is not an integer"

    assert isinstance(controller.get("relays"), list), "controller relays is not a list"
    assert len(controller.get("relays")) == controller.get("relay_count"), "controller relays have a length unequal to relay_count"
    for relay in controller.get("relays"):
        assert isinstance(relay, dict), "controller relays contain a relay which is not a dict"
        validate_relay._verify_single(relay)
        assert relay.get("controller_id") == controller.get("id"), "relay controller_id does not match controller id"


def single(response):
    _verify_single(response.json())


def multiple(response):
    assert isinstance(response.json(), list), "response is not a list"
    for controller in response.json():
        _verify_single(controller)


def check_id(response, id):
    assert response.json().get("id") == id, "controller id check failed"


def check_name(response, name):
    assert response.json().get("name") == name, "controller name check failed"


def check_ip(response, ip):
    assert response.json().get("ip") == ip, "controller ip check failed"


def find(response, id=None, name=None):
    print(response.json())
    for controller in response.json():
        if id is not None and id != controller.get("id"):
            print(controller.get("id"))
            continue

        if name is not None and name != controller.get("name"):
            print(controller.get("name"))
            continue
        return
    assert False, "controller not found in list"
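Tavern hands these helpers the live requests.Response, but the only interface they rely on is .json(). A throwaway stub (hypothetical, not part of the suite) is therefore enough to exercise them locally, assuming the validate_* modules are on the import path:

    import validate_controller

    class StubResponse:
        """Minimal stand-in for requests.Response; validators only call .json()."""
        def __init__(self, payload):
            self._payload = payload

        def json(self):
            return self._payload

    # A controller with zero relays passes the shape checks without touching
    # validate_relay, which keeps the example self-contained.
    controllers = [{
        "id": "abc-123",
        "name": "living room",
        "relay_count": 0,
        "relays": [],
    }]

    validate_controller.multiple(StubResponse(controllers))
    validate_controller.find(StubResponse(controllers), name="living room")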
@@ -1,68 +0,0 @@

import validate_schedule


def _verify_single(relay):
    assert isinstance(relay.get("number"), int), "relay number is not an integer"
    assert isinstance(relay.get("name"), str), "relay name is not a string"
    assert isinstance(relay.get("controller_id"), str), "relay controller_id is not a string"

    assert isinstance(relay.get("active_schedule"), dict), "relay active_schedule is not a dict"
    validate_schedule._verify_single(relay.get("active_schedule"))

    assert isinstance(relay.get("schedules"), list), "relay schedules is not a list"
    assert len(relay.get("schedules")) == 7, "relay schedules have a length unequal to 7"
    for schedule in relay.get("schedules"):
        assert isinstance(schedule, dict), "relay schedules contain a schedule which is not a dict"
        validate_schedule._verify_single(schedule)

    assert isinstance(relay.get("tags"), list), "relay tags is not a list"
    for tag in relay.get("tags"):
        assert isinstance(tag, str), "relay tags contain a tag which is not a string"


def single(response):
    _verify_single(response.json())


def multiple(response):
    assert isinstance(response.json(), list), "response is not a list"
    for relay in response.json():
        _verify_single(relay)


def relay_count(response, relay_count):
    assert len(response.json()) == relay_count, "response has invalid length"


def check_number(response, number):
    assert response.json().get("number") == number, "relay number check failed"


def check_name(response, name):
    assert response.json().get("name") == name, "relay name check failed"


def check_controller_id(response, controller_id):
    assert response.json().get("controller_id") == controller_id, "relay controller_id check failed"


def check_tag(response, tag):
    for response_tag in response.json().get("tags"):
        if response_tag == tag:
            return
    assert False, "tag not found in relay"


def find(response, name=None, number=None, controller_id=None, tag=None):
    print(response.json())
    for relay in response.json():
        if number is not None and number != relay.get("number"):
            continue

        if name is not None and name != relay.get("name"):
            continue

        if controller_id is not None and controller_id != relay.get("controller_id"):
            continue

        if tag is not None and tag not in relay.get("tags"):
            continue
        return
    assert False, "relay not found in list"
@@ -1,97 +0,0 @@

import json


def _verify_single(schedule):
    assert isinstance(schedule.get("id"), str), "schedule id is not a string"
    assert isinstance(schedule.get("name"), str), "schedule name is not a string"

    assert isinstance(schedule.get("periods"), list), "schedule periods is not a list"
    for period in schedule.get("periods"):
        assert isinstance(period, dict), "schedule periods contain a period which is not a dict"
        assert isinstance(period.get("start"), str), "schedule periods contain a period whose start is not a string"
        assert isinstance(period.get("end"), str), "schedule periods contain a period whose end is not a string"

    assert isinstance(schedule.get("tags"), list), "schedule tags is not a list"
    for tag in schedule.get("tags"):
        assert isinstance(tag, str), "schedule tags contain a tag which is not a string"


def single(response):
    _verify_single(response.json())


def multiple(response):
    assert isinstance(response.json(), list), "response is not a list"
    for schedule in response.json():
        _verify_single(schedule)


def check_name(response, name):
    response_name = response.json().get("name")
    assert response_name == name, f"schedule name check failed (expected: '{name}'; actual: '{response_name}')"


def check_id(response, id):
    assert response.json().get("id") == id, "schedule id check failed"


def check_periods(response, periods):
    # Tavern passes the saved periods as the repr of a Python list, so swap
    # the single quotes for double quotes and parse the result as JSON
    periods_json = json.loads(periods.replace("'", "\""))
    assert len(periods_json) == len(response.json().get("periods")), "periods in response and request have different lengths"
    for request_period in periods_json:
        found_in_response = False
        for response_period in response.json().get("periods"):
            if response_period.get("start") != request_period.get("start"):
                continue
            if response_period.get("end") != request_period.get("end"):
                continue
            found_in_response = True
        if not found_in_response:
            print(request_period)
            assert False, "a period from the request was missing from the response"


def check_tag(response, tag):
    for response_tag in response.json().get("tags"):
        if response_tag == tag:
            return
    assert False, "tag not found in schedule"


def compare_off(response):
    assert response.json().get("id") == "off", "schedule off did not return id off"
    assert len(response.json().get("periods")) == 0, "schedule off has periods"


def compare_on(response):
    assert response.json().get("id") == "on", "schedule on did not return id on"
    assert len(response.json().get("periods")) == 1, "schedule on has an unexpected amount of periods"
    assert response.json().get("periods")[0].get("start") == "00:00", "schedule on has an unexpected start"
    assert response.json().get("periods")[0].get("end") == "00:00", "schedule on has an unexpected end"


def find(response, id=None, name=None, periods=None, tag=None):
    if periods is not None:
        periods_json = json.loads(periods.replace("'", "\""))
    for schedule in response.json():
        if id is not None and id != schedule.get("id"):
            print(schedule.get("id"))
            continue

        if name is not None and name != schedule.get("name"):
            print(schedule.get("name"))
            continue

        if periods is not None:
            if len(periods_json) != len(schedule.get("periods")):
                continue
            # a schedule only matches when every requested period is present
            all_periods_found = True
            for request_period in periods_json:
                found_in_response = False
                for response_period in schedule.get("periods"):
                    if response_period.get("start") != request_period.get("start"):
                        continue
                    if response_period.get("end") != request_period.get("end"):
                        continue
                    found_in_response = True
                if not found_in_response:
                    all_periods_found = False
            if not all_periods_found:
                continue

        if tag is not None and tag not in schedule.get("tags"):
            continue
        return
    assert False, "schedule not found in list"
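The quote-swapping in check_periods and find works because the saved values never contain quotes of their own, but it is fragile. Since the saved string is a Python repr, ast.literal_eval could parse it directly; a sketch of that alternative (not what the suite ships):

    import ast

    def parse_saved_periods(periods):
        # Tavern saves the list as its Python repr, e.g.
        # "[{'start': '00:10', 'end': '00:20'}]"; literal_eval parses that
        # directly instead of rewriting quotes and hoping the result is JSON.
        return ast.literal_eval(periods)

    assert parse_saved_periods("[{'start': '00:10', 'end': '00:20'}]") == \
        [{"start": "00:10", "end": "00:20"}]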
@@ -1,34 +0,0 @@

def _verify_single(tag):
    assert isinstance(tag, str), "tag is not a string"


def single(response):
    _verify_single(response.json())


def multiple(response):
    assert isinstance(response.json(), list), "response is not a list"
    for tag in response.json():
        _verify_single(tag)


#def find(response, name=None, number=None, controller_id=None, tag=None):
#    print(response.json())
#    for tag in response.json():
#        if number != None and number != tag.get("number"):
#            continue
#
#        if name != None and name != tag.get("name"):
#            continue
#
#        if controller_id != None and controller_id != tag.get("controller_id"):
#            continue
#
#        if tag != None:
#            found_in_response = False
#            for response_tag in tag.get("tags"):
#                if response_tag == tag:
#                    found_in_response = True
#            if not found_in_response:
#                continue
#        return
#    assert False, "tag not found in list"