Split project (keep controller)
Some checks failed
/ build-artifacts (arm-unknown-linux-gnueabihf) (push) Failing after 9m37s

This commit is contained in:
Tobias Reisinger 2024-04-30 10:38:13 +02:00
parent 9bc75b9627
commit f4fbc95500
Signed by: serguzim
GPG key ID: 13AD60C237A28DFE
128 changed files with 37 additions and 5479 deletions

View file

@ -1,3 +0,0 @@
#EMGAUWA_CONTROLLER__LOGGING__LEVEL=DEBUG
#EMGAUWA_CORE__LOGGING__LEVEL=DEBUG

View file

@ -1,7 +1,32 @@
[workspace]
resolver = "2"
members = [
"emgauwa-core",
"emgauwa-controller",
"emgauwa-common",
]
[package]
name = "emgauwa-controller"
version = "0.5.0"
edition = "2021"
authors = ["Tobias Reisinger <tobias@msrg.cc>"]
[dependencies]
emgauwa-common = { git = "https://git.serguzim.me/emgauwa/common.git" }
actix = "0.13"
tokio = { version = "1.34", features = ["io-std", "macros", "rt-multi-thread"] }
tokio-tungstenite = "0.21"
simple_logger = "4.3"
log = "0.4"
chrono = { version = "0.4", features = ["serde"] }
uuid = { version = "1.5", features = ["serde", "v4"] }
serde = "1.0"
serde_json = "1.0"
serde_derive = "1.0"
sqlx = { version = "0.7", features = ["sqlite", "runtime-tokio", "macros", "chrono"] }
futures = "0.3"
futures-channel = "0.3"
rppal = "0.17"
rppal-pfd = "0.0.5"
rppal-mcp23s17 = "0.0.3"

View file

@ -1,5 +0,0 @@
[build]
pre-build = [
"curl -Lo /usr/bin/yq https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64",
"chmod +x /usr/bin/yq"
]

View file

@ -1,42 +1,23 @@
sqlx:
cargo sqlx database drop -y
cargo sqlx database create
cargo sqlx migrate run
cargo sqlx prepare --workspace
build-rpi:
cross build --target arm-unknown-linux-gnueabihf
emgauwa-%.json: config/%.pkl
emgauwa-controller.json: controller.pkl
pkl eval -f json -o $@ $<
configs:
$(MAKE) emgauwa-core.json
config:
$(MAKE) emgauwa-controller.json
clean:
rm -f emgauwa-controller.json
rm -f emgauwa-controller.sqlite
rm -f emgauwa-core.json
rm -f emgauwa-core.sqlite
rm -f emgauwa-dev.sqlite
emgauwa-controller_%:
$(TOOL) build --target $* --release --bin emgauwa-controller
mkdir -p out/releases
cp target/$*/release/emgauwa-controller out/releases/emgauwa-controller_$*
emgauwa-core_%:
$(TOOL) build --target $* --release --bin emgauwa-core
mkdir -p out/releases
cp target/$*/release/emgauwa-core out/releases/emgauwa-core_$*
emgauwa_%:
$(MAKE) emgauwa-controller_$*
$(MAKE) emgauwa-core_$*
releases:
$(MAKE) TOOL=cross emgauwa_arm-unknown-linux-gnueabihf
$(MAKE) TOOL=cargo emgauwa_x86_64-unknown-linux-gnu
$(MAKE) TOOL=cross emgauwa_x86_64-unknown-linux-musl
$(MAKE) TOOL=cross emgauwa-controller_arm-unknown-linux-gnueabihf
$(MAKE) TOOL=cargo emgauwa-controller_x86_64-unknown-linux-gnu
$(MAKE) TOOL=cross emgauwa-controller_x86_64-unknown-linux-musl

View file

@ -1,875 +0,0 @@
openapi: 3.0.0
info:
contact:
name: Tobias Reisinger
url: 'https://git.serguzim.me/emgauwa/'
title: Emgauwa API v1
version: 0.5.0
description: Server API to manage an Emgauwa system.
servers:
- url: 'http://localhost:4419'
tags:
- name: schedules
- name: relays
- name: controllers
- name: tags
- name: macros
- name: websocket
paths:
/api/v1/schedules:
get:
summary: get all schedules
description: Receive a list with all available schedules.
tags:
- schedules
responses:
'200':
description: OK
headers: { }
content:
application/json:
schema:
type: array
items:
$ref: '#/components/schemas/schedule'
operationId: get-api-v1-schedules
post:
summary: add new schedule
tags:
- schedules
responses:
'201':
description: Created
content:
application/json:
schema:
$ref: '#/components/schemas/schedule'
operationId: post-api-v1-schedules
description: Create a new schedule. A new unique id will be returned
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/schedule'
description: The "id" field will be set by the server.
parameters: [ ]
'/api/v1/schedules/{schedule_id}':
parameters:
- schema:
type: string
format: uuid
name: schedule_id
in: path
required: true
description: ''
get:
summary: get single schedule
tags:
- schedules
responses:
'200':
description: OK
content:
application/json:
schema:
$ref: '#/components/schemas/schedule'
'404':
description: Not Found
content:
application/json:
schema:
type: object
properties: { }
operationId: get-schedules-schedule_id
description: Return a single schedule by id.
put:
summary: overwrite single schedule
tags:
- schedules
responses:
'200':
description: OK
content:
application/json:
schema:
$ref: '#/components/schemas/schedule'
'400':
description: Bad Request
content:
application/json:
schema:
type: object
properties: { }
'404':
description: Not Found
content:
application/json:
schema:
type: object
properties: { }
operationId: put-schedules-schedule_id
requestBody:
content:
application/json:
schema:
type: object
properties:
name:
type: string
periods:
type: array
items:
$ref: '#/components/schemas/period'
tags:
type: array
items:
$ref: '#/components/schemas/tag'
description: ''
parameters: [ ]
description: Overwrite the properties for a single schedule. Overwriting periods on "on" or "off" will fail.
delete:
summary: delete single schedule
tags:
- schedules
responses:
'200':
description: OK
'403':
description: Forbidden
'404':
description: Not Found
operationId: delete-schedules-schedule_id
description: Deletes a single schedule. Deleting "on" or "off" is forbidden (403).
'/api/v1/schedules/tag/{tag}':
parameters:
- schema:
type: string
name: tag
in: path
required: true
description: ''
get:
summary: get schedules by tag
tags:
- schedules
responses:
'200':
description: OK
content:
application/json:
schema:
type: array
items:
$ref: '#/components/schemas/schedule'
operationId: get-schedules-tag-schedule_id
description: Receive a list of schedules which include the given tag.
/api/v1/relays:
get:
summary: get all relays
tags:
- relays
responses:
'200':
description: OK
headers: { }
content:
application/json:
schema:
type: array
items:
$ref: '#/components/schemas/relay'
operationId: get-relays
description: Return a list with all relays.
parameters: [ ]
'/api/v1/relays/tag/{tag}':
parameters:
- schema:
type: string
name: tag
in: path
required: true
get:
summary: get relays by tag
tags:
- relays
responses:
'200':
description: OK
content:
application/json:
schema:
type: array
items:
$ref: '#/components/schemas/relay'
operationId: get-relays-tag-tag
description: Return all relays with the given tag.
/api/v1/controllers:
get:
summary: get all controllers
tags:
- controllers
responses:
'200':
description: OK
content:
application/json:
schema:
$ref: '#/components/schemas/controller'
operationId: get-controllers
description: Return all controllers.
parameters: [ ]
'/api/v1/controllers/{controller_id}':
parameters:
- schema:
type: string
name: controller_id
in: path
description: ''
required: true
get:
summary: get single controller
tags:
- controllers
responses:
'200':
description: OK
content:
application/json:
schema:
$ref: '#/components/schemas/controller'
'404':
description: Not Found
content:
application/json:
schema:
type: object
properties: { }
operationId: get-controllers-controller_id
requestBody:
content:
application/json:
schema:
type: object
properties: { }
description: ''
description: Return a single controller by id. When no controller with the id is found 404 will be returned.
put:
summary: overwrite single controller
tags:
- controllers
responses:
'200':
description: OK
content:
application/json:
schema:
$ref: '#/components/schemas/controller'
'400':
description: Bad Request
content:
application/json:
schema:
type: object
properties: { }
'404':
description: Not Found
content:
application/json:
schema:
type: object
properties: { }
operationId: put-controllers-controller_id
requestBody:
content:
application/json:
schema:
type: object
properties:
name:
type: string
ip:
type: string
format: ipv4
description: Overwrite properties of a single controller.
delete:
summary: delete single controller
tags:
- controllers
responses:
'200':
description: OK
'404':
description: Not Found
operationId: delete-controllers-controller_id
description: Delete a single controller. To recover the controller you need to use the controllers/discover feature.
'/api/v1/controllers/{controller_id}/relays':
parameters:
- schema:
type: string
name: controller_id
in: path
required: true
get:
summary: get all relays for single controller
tags:
- controllers
- relays
responses:
'200':
description: OK
headers: { }
content:
application/json:
schema:
type: array
items:
$ref: '#/components/schemas/relay'
'404':
description: Not Found
content:
application/json:
schema:
type: array
items: { }
operationId: get-controllers-controller_id-relays
description: Returns all relays for a single controller.
'/api/v1/controllers/{controller_id}/relays/{relay_num}':
parameters:
- schema:
type: string
name: controller_id
in: path
required: true
- schema:
type: integer
name: relay_num
in: path
required: true
get:
summary: get single relay for single controller
tags:
- controllers
- relays
responses:
'200':
description: OK
content:
application/json:
schema:
$ref: '#/components/schemas/relay'
'404':
description: Not Found
content:
application/json:
schema:
type: object
properties: { }
operationId: get-controllers-controller_id-relays-relay_num
description: 'Return a single relay by number for a controller by id. When the relay or controller is not found, 404 will be returned.'
put:
summary: overwrite single relay for single controller
tags:
- controllers
- relays
responses:
'200':
description: OK
content:
application/json:
schema:
$ref: '#/components/schemas/relay'
'400':
description: Bad Request
content:
application/json:
schema:
type: object
properties: { }
'404':
description: Not Found
content:
application/json:
schema:
type: object
properties: { }
operationId: put-controllers-controller_id-relays-relay_num
requestBody:
content:
application/json:
schema:
type: object
properties:
name:
type: string
active_schedule:
type: object
properties:
id:
$ref: '#/components/schemas/schedule_id'
schedules:
type: array
maxItems: 7
minItems: 7
items:
type: object
properties:
id:
$ref: '#/components/schemas/schedule_id'
tags:
type: array
items:
$ref: '#/components/schemas/tag'
description: 'active schedule will overwrite schedules[weekday]'
/api/v1/tags:
get:
summary: get all tags
tags:
- tags
responses:
'200':
description: OK
content:
application/json:
schema:
type: array
items:
$ref: '#/components/schemas/tag'
operationId: get-tags
description: Returns a list of tags.
parameters: [ ]
post:
summary: add new tag
operationId: post-api-v1-tags
responses:
'201':
description: Created
content:
application/json:
schema:
$ref: '#/components/schemas/tag_full'
'400':
description: Bad Request
requestBody:
content:
application/json:
schema:
type: object
properties:
tag:
$ref: '#/components/schemas/tag'
description: ''
tags:
- tags
description: Add a new tag. Will return 400 when the tag already exists.
'/api/v1/tags/{tag}':
parameters:
- schema:
type: string
name: tag
in: path
required: true
get:
summary: get relays and schedules for tag
tags:
- tags
responses:
'200':
description: OK
content:
application/json:
schema:
$ref: '#/components/schemas/tag_full'
'404':
description: Not Found
operationId: get-tags-tag
description: Return all models with the given tag (relays and schedules)
delete:
summary: delete tag
operationId: delete-tags-tag
responses:
'200':
description: OK
'404':
description: Not Found
description: delete tag from database and from affected relays and schedules
tags:
- tags
'/api/v1/controllers/{controller_id}/relays/{relay_num}/pulse':
parameters:
- schema:
type: string
name: controller_id
in: path
required: true
- schema:
type: string
name: relay_num
in: path
required: true
post:
summary: pulse relay on
responses:
'200':
description: OK
'404':
description: Not Found
operationId: post-controllers-controller_id-relays-relay_num-pulse
requestBody:
content:
application/json:
schema:
type: object
properties:
duration:
type: integer
description: ''
description: Turn a relay on for a short amount of time. The duration can be set in the body in seconds. When no duration is supplied the default for the relay will be used. The default is read from the controller's config.
tags:
- controllers
- relays
/api/v1/ws/relays:
get:
summary: get relay status updates
tags:
- websocket
responses:
'200':
description: OK
content:
application/json:
schema:
type: array
items:
$ref: '#/components/schemas/relay'
operationId: get-ws-relays
description: |-
WEBSOCKET
This websocket will send all relays with the most recent status every 10 seconds.
parameters: [ ]
/api/v1/macros:
get:
summary: get all macros
tags:
- macros
responses:
'200':
description: OK
content:
application/json:
schema:
type: array
items:
$ref: '#/components/schemas/macro'
operationId: get-api-v1-macros
description: Receive a list with all available macros.
post:
summary: add new macro
tags:
- macros
responses:
'201':
description: Created
operationId: post-api-v1-macros
description: Create a new macro. A new unique id will be returned
requestBody:
content:
application/json:
schema:
type: object
properties:
name:
type: string
actions:
type: array
items:
type: object
properties:
weekday:
type: integer
minimum: 0
maximum: 6
relay:
type: object
properties:
number:
type: integer
controller_id:
$ref: '#/components/schemas/controller_id'
schedule:
type: object
properties:
id:
$ref: '#/components/schemas/schedule_id'
'/api/v1/macros/{macro_id}':
parameters:
- schema:
type: string
name: macro_id
in: path
required: true
get:
summary: get a single macro
tags:
- macros
responses:
'200':
description: OK
content:
application/json:
schema:
$ref: '#/components/schemas/macro'
operationId: get-api-v1-macros-macro_id
description: Return a single macro by id. When no macro with the id is found 404 will be returned.
put:
summary: overwrite a macro
tags:
- macros
responses:
'200':
description: OK
content:
application/json:
schema:
$ref: '#/components/schemas/macro'
operationId: put-api-v1-macros-macro_id
description: Overwrite properties of a single macro.
requestBody:
content:
application/json:
schema:
type: object
properties:
name:
type: string
actions:
type: array
items:
type: object
properties:
weekday:
type: integer
minimum: 0
maximum: 6
schedule:
type: object
properties:
id:
$ref: '#/components/schemas/schedule_id'
relay:
type: object
properties:
number:
type: integer
controller_id:
$ref: '#/components/schemas/controller_id'
delete:
summary: delete a macro
tags:
- macros
responses:
'200':
description: OK
operationId: delete-api-v1-macros-macro_id
description: Delete a single macro.
'/api/v1/macros/{macro_id}/execute':
parameters:
- schema:
type: string
name: macro_id
in: path
required: true
- schema:
type: integer
name: weekday
in: query
put:
summary: execute a macro
tags:
- macros
responses:
'200':
description: OK
'404':
description: Not Found
operationId: put-api-v1-macros-macro_id-execute
description: Execute a macro
/api/v1/schedules/list:
post:
summary: add new schedule list
tags:
- schedules
responses:
'200':
description: OK
operationId: post-schedules-list
requestBody:
content:
application/json:
schema:
type: array
items:
$ref: '#/components/schemas/schedule'
description: Create a list of schedules
parameters: [ ]
components:
schemas:
controller:
title: controller
type: object
properties:
id:
$ref: '#/components/schemas/controller_id'
name:
type: string
example: Garden Controller
ip:
type: string
format: ipv4
example: 224.73.153.12
active:
type: boolean
port:
type: integer
example: 27480
relay_count:
type: integer
minimum: 0
example: 10
relays:
type: array
items:
$ref: '#/components/schemas/relay'
relay:
title: relay
type: object
properties:
number:
type: integer
minimum: 0
example: 3
name:
type: string
example: Sprinkling System 1
controller_id:
$ref: '#/components/schemas/controller_id'
active_schedule:
$ref: '#/components/schemas/schedule-untagged'
schedules:
type: array
maxItems: 7
minItems: 7
items:
$ref: '#/components/schemas/schedule-untagged'
tags:
type: array
items:
$ref: '#/components/schemas/tag'
is_on:
type: boolean
description: NULL when unknown
schedule-untagged:
title: schedule
type: object
description: ''
properties:
id:
$ref: '#/components/schemas/schedule_id'
name:
type: string
example: Sprinkler Sunny Day
periods:
type: array
items:
$ref: '#/components/schemas/period'
schedule:
title: schedule
type: object
description: ''
properties:
id:
$ref: '#/components/schemas/schedule_id'
name:
type: string
example: Sprinkler Sunny Day
periods:
type: array
items:
$ref: '#/components/schemas/period'
tags:
type: array
items:
$ref: '#/components/schemas/tag'
period:
title: period
type: object
properties:
start:
type: string
example: '10:15'
format: 24-hour
end:
type: string
format: 24-hour
example: '14:45'
required:
- start
- end
controller_id:
type: string
title: controller_id
format: uuid
example: 589c0eab-a4b4-4f3a-be97-cf03b1dc8edc
tag:
type: string
title: tag
example: sprinkler
tag_full:
title: tag (full)
type: object
properties:
tag:
$ref: '#/components/schemas/tag'
relays:
type: array
items:
$ref: '#/components/schemas/relay'
schedules:
type: array
items:
$ref: '#/components/schemas/schedule'
schedule_id:
type: string
title: schedule_id
format: uuid
example: 6bceb29b-7d2e-4af3-a26e-11f514dc5cc1
macro:
title: macro
type: object
properties:
id:
type: string
format: uuid
example: a9a4eab4-6c54-4fe4-b755-bdb2a90b3242
name:
type: string
actions:
type: array
items:
$ref: '#/components/schemas/macro_action'
macro_action:
title: macro_action
type: object
description: ''
properties:
weekday:
type: integer
minimum: 0
maximum: 6
schedule:
$ref: '#/components/schemas/schedule'
relay:
$ref: '#/components/schemas/relay'
required:
- weekday
- schedule
- relay

View file

@ -1 +0,0 @@
amends "package://emgauwa.app/pkl/emgauwa@0.1.0#/core.pkl"

View file

@ -1,32 +0,0 @@
[package]
name = "emgauwa-common"
version = "0.5.0"
edition = "2021"
authors = ["Tobias Reisinger <tobias@msrg.cc>"]
[dependencies]
actix = "0.13"
actix-web = "4.4"
actix-web-actors = "4.2"
serde = "1.0"
serde_json = "1.0"
serde_derive = "1.0"
simple_logger = "4.2"
log = "0.4"
config = "0.13"
chrono = { version = "0.4", features = ["serde"] }
sqlx = { version = "0.7", features = ["sqlite", "runtime-tokio", "macros", "chrono"] }
libsqlite3-sys = { version = "*", features = ["bundled"] }
uuid = "1.6"
futures = "0.3"
libc = "0.2"
rppal = "0.17"
rppal-pfd = "0.0.5"
rppal-mcp23s17 = "0.0.3"

View file

@ -1,3 +0,0 @@
/// Build script: emit a rerun directive so Cargo re-runs this script
/// (and rebuilds the crate) whenever anything under `migrations/` changes.
fn main() {
    // Cargo reads build-script directives from stdout.
    let directive = "cargo:rerun-if-changed=migrations";
    println!("{directive}");
}

View file

@ -1,10 +0,0 @@
use std::time::Duration;
// Default TCP port for the Emgauwa API (matches the server URL in the
// OpenAPI spec: http://localhost:4419).
pub const DEFAULT_PORT: u16 = 4419;
// Interval between heartbeat pings — presumably for the websocket
// connection between controller and core; TODO confirm at call sites.
pub const HEARTBEAT_INTERVAL: Duration = Duration::from_secs(5);
// A peer is considered gone after this long without a heartbeat
// (3x the heartbeat interval).
pub const HEARTBEAT_TIMEOUT: Duration = Duration::from_secs(15);
// Back-off before retrying a failed websocket connection attempt.
pub const WEBSOCKET_RETRY_TIMEOUT: Duration = Duration::from_secs(5);
// Back-off before retrying relay setup — NOTE(review): exact usage not
// visible here; verify against the controller's relay loop.
pub const RELAYS_RETRY_TIMEOUT: Duration = Duration::from_secs(5);
// Default pulse duration in seconds, used when the pulse endpoint is
// called without an explicit duration (see the OpenAPI pulse route).
pub const RELAY_PULSE_DURATION: u64 = 3;

View file

@ -1,184 +0,0 @@
use std::ops::DerefMut;
use serde_derive::{Deserialize, Serialize};
use sqlx::pool::PoolConnection;
use sqlx::Sqlite;
use crate::db::{DbRelay, DbTag};
use crate::errors::DatabaseError;
use crate::types::EmgauwaUid;
/// Database model for one row of the `controllers` table.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DbController {
// Internal numeric primary key; never exposed in serialized output.
#[serde(skip)]
pub id: i64,
// Public identifier; serialized under the JSON key "id".
#[serde(rename = "id")]
pub uid: EmgauwaUid,
pub name: String,
// Number of relays this controller exposes.
pub relay_count: i64,
// Liveness flag; set via update_active() and cleared in bulk by
// all_inactive().
pub active: bool,
}
impl DbController {
/// Fetch every row of the `controllers` table.
pub async fn get_all(
conn: &mut PoolConnection<Sqlite>,
) -> Result<Vec<DbController>, DatabaseError> {
sqlx::query_as!(DbController, "SELECT * FROM controllers")
.fetch_all(conn.deref_mut())
.await
.map_err(DatabaseError::from)
}
/// Fetch a single controller by internal primary key.
pub async fn get(
conn: &mut PoolConnection<Sqlite>,
id: i64,
) -> Result<Option<DbController>, DatabaseError> {
sqlx::query_as!(DbController, "SELECT * FROM controllers WHERE id = ?", id)
.fetch_optional(conn.deref_mut())
.await
.map_err(DatabaseError::from)
}
/// Fetch a single controller by its public uid.
pub async fn get_by_uid(
conn: &mut PoolConnection<Sqlite>,
filter_uid: &EmgauwaUid,
) -> Result<Option<DbController>, DatabaseError> {
sqlx::query_as!(
DbController,
"SELECT * FROM controllers WHERE uid = ?",
filter_uid
)
.fetch_optional(conn.deref_mut())
.await
.map_err(DatabaseError::from)
}
/// Fetch the controller with the given uid, creating it with the
/// supplied name and relay count when it does not exist yet.
pub async fn get_by_uid_or_create(
conn: &mut PoolConnection<Sqlite>,
uid: &EmgauwaUid,
new_name: &str,
new_relay_count: i64,
) -> Result<DbController, DatabaseError> {
match DbController::get_by_uid(conn, uid).await? {
Some(tag) => Ok(tag),
None => DbController::create(conn, uid, new_name, new_relay_count).await,
}
}
/// Fetch all controllers linked to the given tag.
///
/// NOTE(review): the SQL aliases `controllers` as `schedule` and joins on
/// `junction_tag.schedule_id` — this looks copy-pasted from the schedule
/// variant. The visible DbJunctionTag model has no controller column, so
/// verify this query against the actual junction_tag schema.
pub async fn get_by_tag(
conn: &mut PoolConnection<Sqlite>,
tag: &DbTag,
) -> Result<Vec<DbController>, DatabaseError> {
sqlx::query_as!(DbController, "SELECT schedule.* FROM controllers AS schedule INNER JOIN junction_tag ON junction_tag.schedule_id = schedule.id WHERE junction_tag.tag_id = ?", tag.id)
.fetch_all(conn.deref_mut())
.await
.map_err(DatabaseError::from)
}
/// Delete the controller with the given uid.
///
/// Returns NotFound when no such row exists, DeleteError when the
/// delete itself affected no rows.
pub async fn delete_by_uid(
conn: &mut PoolConnection<Sqlite>,
filter_uid: EmgauwaUid,
) -> Result<(), DatabaseError> {
// Explicit existence check so callers can distinguish "not found"
// from a failed delete.
if sqlx::query_scalar!("SELECT 1 FROM controllers WHERE uid = ?", filter_uid)
.fetch_optional(conn.deref_mut())
.await?
.is_none()
{
return Err(DatabaseError::NotFound);
}
sqlx::query!("DELETE FROM controllers WHERE uid = ?", filter_uid)
.execute(conn.deref_mut())
.await
.map(|res| match res.rows_affected() {
0 => Err(DatabaseError::DeleteError),
_ => Ok(()),
})?
}
/// Insert a new controller and return the created row
/// (via SQLite RETURNING).
pub async fn create(
conn: &mut PoolConnection<Sqlite>,
new_uid: &EmgauwaUid,
new_name: &str,
new_relay_count: i64,
) -> Result<DbController, DatabaseError> {
sqlx::query_as!(
DbController,
"INSERT INTO controllers (uid, name, relay_count) VALUES (?, ?, ?) RETURNING *",
new_uid,
new_name,
new_relay_count,
)
.fetch_optional(conn.deref_mut())
.await?
.ok_or(DatabaseError::InsertGetError)
}
/// Update name and relay count, then return the refreshed row.
pub async fn update(
&self,
conn: &mut PoolConnection<Sqlite>,
new_name: &str,
new_relay_count: i64,
) -> Result<DbController, DatabaseError> {
sqlx::query!(
"UPDATE controllers SET name = ?, relay_count = ? WHERE id = ?",
new_name,
new_relay_count,
self.id,
)
.execute(conn.deref_mut())
.await?;
Self::get(conn, self.id)
.await?
.ok_or(DatabaseError::UpdateGetError)
}
/// Set the active flag for this controller and return the refreshed row.
pub async fn update_active(
&self,
conn: &mut PoolConnection<Sqlite>,
new_active: bool,
) -> Result<DbController, DatabaseError> {
sqlx::query!(
"UPDATE controllers SET active = ? WHERE id = ?",
new_active,
self.id,
)
.execute(conn.deref_mut())
.await?;
Self::get(conn, self.id)
.await?
.ok_or(DatabaseError::UpdateGetError)
}
/// Fetch all relays belonging to this controller.
pub async fn get_relays(
&self,
conn: &mut PoolConnection<Sqlite>,
) -> Result<Vec<DbRelay>, DatabaseError> {
sqlx::query_as!(
DbRelay,
"SELECT * FROM relays WHERE controller_id = ?",
self.id
)
.fetch_all(conn.deref_mut())
.await
.map_err(DatabaseError::from)
}
/// Mark every controller inactive (active = 0) in a single statement.
pub async fn all_inactive(conn: &mut PoolConnection<Sqlite>) -> Result<(), DatabaseError> {
sqlx::query!("UPDATE controllers SET active = 0")
.execute(conn.deref_mut())
.await?;
Ok(())
}
/// Re-read this controller's row from the database.
pub async fn reload(
&self,
conn: &mut PoolConnection<Sqlite>,
) -> Result<DbController, DatabaseError> {
Self::get(conn, self.id)
.await?
.ok_or(DatabaseError::NotFound)
}
}

View file

@ -1,146 +0,0 @@
use std::ops::DerefMut;
use sqlx::pool::PoolConnection;
use sqlx::Sqlite;
use crate::db::{DbRelay, DbSchedule};
use crate::errors::DatabaseError;
use crate::types::Weekday;
/// Junction row linking a relay to the schedule it runs on a given
/// weekday (table `junction_relay_schedule`).
pub struct DbJunctionRelaySchedule {
pub id: i64,
pub weekday: Weekday,
pub relay_id: i64,
pub schedule_id: i64,
}
impl DbJunctionRelaySchedule {
/// Fetch a junction row by primary key.
pub async fn get(
conn: &mut PoolConnection<Sqlite>,
id: i64,
) -> Result<Option<DbJunctionRelaySchedule>, DatabaseError> {
sqlx::query_as!(
DbJunctionRelaySchedule,
"SELECT * FROM junction_relay_schedule WHERE id = ?",
id
)
.fetch_optional(conn.deref_mut())
.await
.map_err(DatabaseError::from)
}
/// Fetch the junction row for a specific relay/weekday pair, if any.
pub async fn get_junction_by_relay_and_weekday(
conn: &mut PoolConnection<Sqlite>,
relay: &DbRelay,
weekday: Weekday,
) -> Result<Option<DbJunctionRelaySchedule>, DatabaseError> {
sqlx::query_as!(
DbJunctionRelaySchedule,
"SELECT * FROM junction_relay_schedule WHERE relay_id = ? AND weekday = ?",
relay.id,
weekday
)
.fetch_optional(conn.deref_mut())
.await
.map_err(DatabaseError::from)
}
/// Fetch all relays that use the given schedule, ordered by weekday.
pub async fn get_relays(
conn: &mut PoolConnection<Sqlite>,
schedule: &DbSchedule,
) -> Result<Vec<DbRelay>, DatabaseError> {
sqlx::query_as!(
DbRelay,
r#"SELECT relays.* FROM relays INNER JOIN junction_relay_schedule
ON junction_relay_schedule.relay_id = relays.id
WHERE junction_relay_schedule.schedule_id = ?
ORDER BY junction_relay_schedule.weekday"#,
schedule.id
)
.fetch_all(conn.deref_mut())
.await
.map_err(DatabaseError::from)
}
/// Fetch the schedule assigned to a relay on a given weekday, if any.
pub async fn get_schedule(
conn: &mut PoolConnection<Sqlite>,
relay: &DbRelay,
weekday: Weekday,
) -> Result<Option<DbSchedule>, DatabaseError> {
sqlx::query_as!(
DbSchedule,
r#"SELECT schedules.* FROM schedules INNER JOIN junction_relay_schedule
ON junction_relay_schedule.schedule_id = schedules.id
WHERE junction_relay_schedule.relay_id = ? AND junction_relay_schedule.weekday = ?"#,
relay.id,
weekday
)
.fetch_optional(conn.deref_mut())
.await
.map_err(DatabaseError::from)
}
/// Fetch all schedules assigned to a relay, ordered by weekday.
pub async fn get_schedules(
conn: &mut PoolConnection<Sqlite>,
relay: &DbRelay,
) -> Result<Vec<DbSchedule>, DatabaseError> {
sqlx::query_as!(
DbSchedule,
r#"SELECT schedules.* FROM schedules INNER JOIN junction_relay_schedule
ON junction_relay_schedule.schedule_id = schedules.id
WHERE junction_relay_schedule.relay_id = ?
ORDER BY junction_relay_schedule.weekday"#,
relay.id
)
.fetch_all(conn.deref_mut())
.await
.map_err(DatabaseError::from)
}
/// Upsert the schedule for a relay/weekday pair: inserts a junction row
/// when none exists, otherwise updates the existing one. Returns the
/// resulting row.
pub async fn set_schedule(
conn: &mut PoolConnection<Sqlite>,
relay: &DbRelay,
schedule: &DbSchedule,
weekday: Weekday,
) -> Result<DbJunctionRelaySchedule, DatabaseError> {
match Self::get_junction_by_relay_and_weekday(conn, relay, weekday).await? {
None => sqlx::query_as!(
DbJunctionRelaySchedule,
"INSERT INTO junction_relay_schedule (weekday, relay_id, schedule_id) VALUES (?, ?, ?) RETURNING *",
weekday,
relay.id,
schedule.id
)
.fetch_optional(conn.deref_mut())
.await?
.ok_or(DatabaseError::InsertGetError),
Some(junction) => {
sqlx::query!(
"UPDATE junction_relay_schedule SET weekday = ?, relay_id = ?, schedule_id= ? WHERE id = ?",
weekday,
relay.id,
schedule.id,
junction.id
)
.execute(conn.deref_mut())
.await?;
Self::get(conn, junction.id)
.await?
.ok_or(DatabaseError::UpdateGetError)
}
}
}
/// Assign schedules positionally: schedules[i] becomes the schedule for
/// weekday i (cast to Weekday). Assumes the slice is ordered by weekday
/// and presumably 7 entries long — TODO confirm at call sites.
pub async fn set_schedules(
conn: &mut PoolConnection<Sqlite>,
relay: &DbRelay,
schedules: Vec<&DbSchedule>,
) -> Result<(), DatabaseError> {
for (weekday, schedule) in schedules.iter().enumerate() {
Self::set_schedule(conn, relay, schedule, weekday as Weekday).await?;
}
Ok(())
}
}

View file

@ -1,48 +0,0 @@
use std::ops::DerefMut;
use sqlx::pool::PoolConnection;
use sqlx::Sqlite;
use crate::db::{DbRelay, DbSchedule, DbTag};
use crate::errors::DatabaseError;
/// Junction row linking a tag to either a relay or a schedule
/// (table `junction_tag`); exactly one of the two targets is expected
/// to be set per row.
pub struct DbJunctionTag {
pub id: i64,
pub tag_id: i64,
pub relay_id: Option<i64>,
pub schedule_id: Option<i64>,
}
impl DbJunctionTag {
/// Link a tag to a relay; returns the created junction row.
pub async fn link_relay(
conn: &mut PoolConnection<Sqlite>,
tag: &DbTag,
relay: &DbRelay,
) -> Result<DbJunctionTag, DatabaseError> {
sqlx::query_as!(
DbJunctionTag,
"INSERT INTO junction_tag (tag_id, relay_id) VALUES (?, ?) RETURNING *",
tag.id,
relay.id
)
.fetch_optional(conn.deref_mut())
.await?
.ok_or(DatabaseError::InsertGetError)
}
/// Link a tag to a schedule; returns the created junction row.
pub async fn link_schedule(
conn: &mut PoolConnection<Sqlite>,
tag: &DbTag,
schedule: &DbSchedule,
) -> Result<DbJunctionTag, DatabaseError> {
sqlx::query_as!(
DbJunctionTag,
"INSERT INTO junction_tag (tag_id, schedule_id) VALUES (?, ?) RETURNING *",
tag.id,
schedule.id
)
.fetch_optional(conn.deref_mut())
.await?
.ok_or(DatabaseError::InsertGetError)
}
}

View file

@ -1,166 +0,0 @@
use std::ops::DerefMut;
use serde_derive::{Deserialize, Serialize};
use sqlx::pool::PoolConnection;
use sqlx::Sqlite;
use crate::db::{DbController, DbMacroAction, DbRelay, DbSchedule};
use crate::errors::DatabaseError;
use crate::types::{EmgauwaUid, RequestMacroAction};
/// Database model for one row of the `macros` table.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DbMacro {
// Internal numeric primary key; never exposed in serialized output.
#[serde(skip)]
pub id: i64,
// Public identifier; serialized under the JSON key "id".
#[serde(rename = "id")]
pub uid: EmgauwaUid,
pub name: String,
}
impl DbMacro {
    /// Fetch every row of the `macros` table.
    pub async fn get_all(conn: &mut PoolConnection<Sqlite>) -> Result<Vec<DbMacro>, DatabaseError> {
        sqlx::query_as!(DbMacro, "SELECT * FROM macros")
            .fetch_all(conn.deref_mut())
            .await
            .map_err(DatabaseError::from)
    }

    /// Fetch a single macro by internal primary key.
    pub async fn get(
        conn: &mut PoolConnection<Sqlite>,
        id: i64,
    ) -> Result<Option<DbMacro>, DatabaseError> {
        sqlx::query_as!(DbMacro, "SELECT * FROM macros WHERE id = ?", id)
            .fetch_optional(conn.deref_mut())
            .await
            .map_err(DatabaseError::from)
    }

    /// Fetch a single macro by its public uid.
    pub async fn get_by_uid(
        conn: &mut PoolConnection<Sqlite>,
        filter_uid: &EmgauwaUid,
    ) -> Result<Option<DbMacro>, DatabaseError> {
        sqlx::query_as!(DbMacro, "SELECT * FROM macros WHERE uid = ?", filter_uid)
            .fetch_optional(conn.deref_mut())
            .await
            .map_err(DatabaseError::from)
    }

    /// Insert a new macro and return the created row (via SQLite RETURNING).
    pub async fn create(
        conn: &mut PoolConnection<Sqlite>,
        new_uid: EmgauwaUid,
        new_name: &str,
    ) -> Result<DbMacro, DatabaseError> {
        sqlx::query_as!(
            DbMacro,
            "INSERT INTO macros (uid, name) VALUES (?, ?) RETURNING *",
            new_uid,
            new_name
        )
        .fetch_optional(conn.deref_mut())
        .await?
        .ok_or(DatabaseError::InsertGetError)
    }

    /// Delete this macro by primary key.
    ///
    /// Returns `DatabaseError::DeleteError` when no row was affected.
    pub async fn delete(&self, conn: &mut PoolConnection<Sqlite>) -> Result<(), DatabaseError> {
        sqlx::query!("DELETE FROM macros WHERE id = ?", self.id)
            .execute(conn.deref_mut())
            .await
            .map(|res| match res.rows_affected() {
                0 => Err(DatabaseError::DeleteError),
                _ => Ok(()),
            })?
    }

    /// Delete a macro by uid.
    ///
    /// Returns `DatabaseError::NotFound` when no such macro exists, and
    /// `DatabaseError::DeleteError` when the delete affected no rows.
    pub async fn delete_by_uid(
        conn: &mut PoolConnection<Sqlite>,
        filter_uid: EmgauwaUid,
    ) -> Result<(), DatabaseError> {
        // Explicit existence check so callers can distinguish "not found"
        // from a failed delete.
        if sqlx::query_scalar!("SELECT 1 FROM macros WHERE uid = ?", filter_uid)
            .fetch_optional(conn.deref_mut())
            .await?
            .is_none()
        {
            return Err(DatabaseError::NotFound);
        }
        sqlx::query!("DELETE FROM macros WHERE uid = ?", filter_uid)
            .execute(conn.deref_mut())
            .await
            .map(|res| match res.rows_affected() {
                0 => Err(DatabaseError::DeleteError),
                _ => Ok(()),
            })?
    }

    /// Rename this macro and return the refreshed row.
    pub async fn update(
        &self,
        conn: &mut PoolConnection<Sqlite>,
        new_name: &str,
    ) -> Result<DbMacro, DatabaseError> {
        // BUGFIX: this previously ran `UPDATE relays SET name = ...`,
        // renaming an unrelated relay row (matching by this macro's id)
        // while leaving the macro itself untouched — a copy-paste error
        // from DbRelay. The macros table is the correct target.
        sqlx::query!("UPDATE macros SET name = ? WHERE id = ?", new_name, self.id)
            .execute(conn.deref_mut())
            .await?;
        DbMacro::get(conn, self.id)
            .await?
            .ok_or(DatabaseError::UpdateGetError)
    }

    /// Replace all actions of this macro with `new_actions`.
    ///
    /// Existing `macro_actions` rows are deleted first; each request action
    /// is then resolved (controller by uid, relay by controller + number,
    /// schedule by uid) and inserted. Returns `DatabaseError::NotFound` if
    /// any referenced entity does not exist.
    pub async fn set_actions(
        &self,
        conn: &mut PoolConnection<Sqlite>,
        new_actions: &[RequestMacroAction],
    ) -> Result<(), DatabaseError> {
        sqlx::query!("DELETE FROM macro_actions WHERE macro_id = ?", self.id)
            .execute(conn.deref_mut())
            .await?;
        for new_action in new_actions {
            let controller = DbController::get_by_uid(conn, &new_action.relay.controller_id)
                .await?
                .ok_or(DatabaseError::NotFound)?;
            let relay =
                DbRelay::get_by_controller_and_num(conn, &controller, new_action.relay.number)
                    .await?
                    .ok_or(DatabaseError::NotFound)?;
            let schedule = DbSchedule::get_by_uid(conn, &new_action.schedule.id)
                .await?
                .ok_or(DatabaseError::NotFound)?;
            DbMacroAction::create(conn, self, &relay, &schedule, new_action.weekday).await?;
        }
        Ok(())
    }

    /// Fetch all actions belonging to this macro.
    pub async fn get_actions(
        &self,
        conn: &mut PoolConnection<Sqlite>,
    ) -> Result<Vec<DbMacroAction>, DatabaseError> {
        sqlx::query_as!(
            DbMacroAction,
            "SELECT * FROM macro_actions WHERE macro_id = ?",
            self.id
        )
        .fetch_all(conn.deref_mut())
        .await
        .map_err(DatabaseError::from)
    }

    /// Fetch this macro's actions restricted to a single weekday.
    pub async fn get_actions_weekday(
        &self,
        conn: &mut PoolConnection<Sqlite>,
        weekday: i64,
    ) -> Result<Vec<DbMacroAction>, DatabaseError> {
        sqlx::query_as!(
            DbMacroAction,
            "SELECT * FROM macro_actions WHERE macro_id = ? AND weekday = ?",
            self.id,
            weekday
        )
        .fetch_all(conn.deref_mut())
        .await
        .map_err(DatabaseError::from)
    }
}

View file

@ -1,99 +0,0 @@
use std::ops::DerefMut;
use sqlx::pool::PoolConnection;
use sqlx::Sqlite;
use crate::db::{DbMacro, DbRelay, DbSchedule};
use crate::errors::DatabaseError;
/// Database model for one row of the `macro_actions` table: one
/// (relay, schedule, weekday) assignment belonging to a macro.
#[derive(Debug, Clone)]
pub struct DbMacroAction {
pub id: i64,
pub macro_id: i64,
pub relay_id: i64,
pub schedule_id: i64,
pub weekday: i64, // should be u8, but sqlite will store it as i64
}
impl DbMacroAction {
    /// List every macro action in the database.
    pub async fn get_all(
        conn: &mut PoolConnection<Sqlite>,
    ) -> Result<Vec<DbMacroAction>, DatabaseError> {
        sqlx::query_as!(DbMacroAction, "SELECT * FROM macro_actions")
            .fetch_all(conn.deref_mut())
            .await
            .map_err(DatabaseError::from)
    }
    /// Look up a single action by primary key; `None` when absent.
    pub async fn get(
        conn: &mut PoolConnection<Sqlite>,
        id: i64,
    ) -> Result<Option<DbMacroAction>, DatabaseError> {
        sqlx::query_as!(
            DbMacroAction,
            "SELECT * FROM macro_actions WHERE id = ?",
            id
        )
        .fetch_optional(conn.deref_mut())
        .await
        .map_err(DatabaseError::from)
    }
    /// Insert a new action row and return it (`RETURNING *`).
    pub async fn create(
        conn: &mut PoolConnection<Sqlite>,
        new_macro: &DbMacro,
        new_relay: &DbRelay,
        new_schedule: &DbSchedule,
        new_weekday: i64,
    ) -> Result<DbMacroAction, DatabaseError> {
        sqlx::query_as!(
            DbMacroAction,
            "INSERT INTO macro_actions (macro_id, relay_id, schedule_id, weekday) VALUES (?, ?, ?, ?) RETURNING *",
            new_macro.id,
            new_relay.id,
            new_schedule.id,
            new_weekday
        )
        .fetch_optional(conn.deref_mut())
        .await?
        // No row back from RETURNING means the insert silently failed.
        .ok_or(DatabaseError::InsertGetError)
    }
    /// Delete this action; `DeleteError` when no row was affected.
    pub async fn delete(&self, conn: &mut PoolConnection<Sqlite>) -> Result<(), DatabaseError> {
        sqlx::query!("DELETE FROM macro_actions WHERE id = ?", self.id)
            .execute(conn.deref_mut())
            .await
            .map(|res| match res.rows_affected() {
                0 => Err(DatabaseError::DeleteError),
                _ => Ok(()),
            })?
    }
    /// Resolve the relay this action points at (`NotFound` if dangling).
    pub async fn get_relay(
        &self,
        conn: &mut PoolConnection<Sqlite>,
    ) -> Result<DbRelay, DatabaseError> {
        DbRelay::get(conn, self.relay_id)
            .await?
            .ok_or(DatabaseError::NotFound)
    }
    /// Resolve the schedule this action points at (`NotFound` if dangling).
    pub async fn get_schedule(
        &self,
        conn: &mut PoolConnection<Sqlite>,
    ) -> Result<DbSchedule, DatabaseError> {
        DbSchedule::get(conn, self.schedule_id)
            .await?
            .ok_or(DatabaseError::NotFound)
    }
    /// Resolve the owning macro (`NotFound` if dangling).
    pub async fn get_macro(
        &self,
        conn: &mut PoolConnection<Sqlite>,
    ) -> Result<DbMacro, DatabaseError> {
        DbMacro::get(conn, self.macro_id)
            .await?
            .ok_or(DatabaseError::NotFound)
    }
}

View file

@ -1,55 +0,0 @@
use std::str::FromStr;
use sqlx::migrate::Migrator;
use sqlx::sqlite::{SqliteConnectOptions, SqlitePoolOptions};
use sqlx::{ConnectOptions, Pool, Sqlite};
mod controllers;
mod junction_relay_schedule;
mod junction_tag;
mod r#macro;
mod macro_action;
mod model_utils;
mod relays;
mod schedules;
mod tag;
pub use controllers::DbController;
pub use junction_relay_schedule::DbJunctionRelaySchedule;
pub use junction_tag::DbJunctionTag;
pub use macro_action::DbMacroAction;
pub use r#macro::DbMacro;
pub use relays::DbRelay;
pub use schedules::{DbPeriods, DbSchedule};
pub use tag::DbTag;
use crate::errors::{DatabaseError, EmgauwaError};
// Migrations are embedded at compile time from the workspace-level
// `../migrations` directory (sqlx defaults to "./migrations").
static MIGRATOR: Migrator = sqlx::migrate!("../migrations"); // defaults to "./migrations"
/// Apply all pending embedded migrations to the given pool.
pub async fn run_migrations(pool: &Pool<Sqlite>) -> Result<(), EmgauwaError> {
    log::info!("Running migrations");
    MIGRATOR.run(pool).await.map_err(DatabaseError::from)?;
    Ok(())
}
/// Open (creating if missing) the SQLite database at `db`, run migrations,
/// and make sure the two protected schedules ("on"/"off") exist.
///
/// Pool settings: max 5 connections, 1s acquire timeout; every SQL
/// statement is logged at TRACE level.
pub async fn init(db: &str) -> Result<Pool<Sqlite>, EmgauwaError> {
    let options = SqliteConnectOptions::from_str(db)?
        .create_if_missing(true)
        .log_statements(log::LevelFilter::Trace);
    let pool: Pool<Sqlite> = SqlitePoolOptions::new()
        .acquire_timeout(std::time::Duration::from_secs(1))
        .max_connections(5)
        .connect_with(options)
        .await?;
    run_migrations(&pool).await?;
    let mut pool_conn = pool.acquire().await?;
    // Seed the protected "always on" / "always off" schedules on first run.
    DbSchedule::get_on(&mut pool_conn).await?;
    DbSchedule::get_off(&mut pool_conn).await?;
    Ok(pool)
}

View file

@ -1,137 +0,0 @@
use chrono::{NaiveTime, Timelike};
use serde::{Deserialize, Serialize};
use sqlx::database::HasArguments;
use sqlx::encode::IsNull;
use sqlx::error::BoxDynError;
use sqlx::sqlite::{SqliteTypeInfo, SqliteValueRef};
use sqlx::{Decode, Encode, Sqlite, Type};
use crate::db::DbPeriods;
/// A daily on-interval `[start, end)` serialized as "HH:MM" strings.
/// `start == end` is the special "always on" encoding (see `is_on`).
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct Period {
    #[serde(with = "period_format")]
    pub start: NaiveTime,
    #[serde(with = "period_format")]
    pub end: NaiveTime,
}
/// serde (de)serialization of `NaiveTime` as "%H:%M" (minute precision;
/// seconds are dropped on serialize and assumed 0 on deserialize).
mod period_format {
    use chrono::NaiveTime;
    use serde::{self, Deserialize, Deserializer, Serializer};
    const FORMAT: &str = "%H:%M";
    pub fn serialize<S>(time: &NaiveTime, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let s = format!("{}", time.format(FORMAT));
        serializer.serialize_str(&s)
    }
    pub fn deserialize<'de, D>(deserializer: D) -> Result<NaiveTime, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        NaiveTime::parse_from_str(&s, FORMAT).map_err(serde::de::Error::custom)
    }
}
impl Period {
    /// Build a period covering `[start, end)`.
    pub fn new(start: NaiveTime, end: NaiveTime) -> Self {
        Period { start, end }
    }

    /// The "always on" period: start == end == midnight.
    pub fn new_on() -> Self {
        Self::new(NaiveTime::MIN, NaiveTime::MIN)
    }

    /// True when `now` falls inside the period. A period whose start and
    /// end coincide counts as permanently on.
    pub fn is_on(&self, now: &NaiveTime) -> bool {
        self.start == self.end || (self.start <= *now && *now < self.end)
    }

    /// The next boundary (start or end) strictly after `now`, or `None`
    /// when no boundary of this period lies ahead today. An "always on"
    /// period has no boundaries at all.
    pub fn get_next_time(&self, now: &NaiveTime) -> Option<NaiveTime> {
        if self.start == self.end {
            // this period is always on
            return None;
        }
        // Whichever of the two boundaries is still ahead and comes first.
        [self.start, self.end]
            .into_iter()
            .filter(|t| *t > *now)
            .min()
    }
}
// DbPeriods is stored in SQLite as a BLOB (see the From impls below for
// the 4-bytes-per-period wire format).
impl Type<Sqlite> for DbPeriods {
    fn type_info() -> SqliteTypeInfo {
        <&[u8] as Type<Sqlite>>::type_info()
    }
    fn compatible(ty: &SqliteTypeInfo) -> bool {
        <&[u8] as Type<Sqlite>>::compatible(ty)
    }
}
// Encode by converting to the packed byte representation.
impl<'q> Encode<'q, Sqlite> for DbPeriods {
    //noinspection DuplicatedCode
    fn encode_by_ref(&self, buf: &mut <Sqlite as HasArguments<'q>>::ArgumentBuffer) -> IsNull {
        <&Vec<u8> as Encode<Sqlite>>::encode(&Vec::from(self), buf)
    }
}
// Decode the BLOB back into periods via From<Vec<u8>>.
impl<'r> Decode<'r, Sqlite> for DbPeriods {
    fn decode(value: SqliteValueRef<'r>) -> Result<Self, BoxDynError> {
        let blob = <&[u8] as Decode<Sqlite>>::decode(value)?;
        Ok(DbPeriods::from(Vec::from(blob)))
    }
}
/// Pack periods into the BLOB wire format: 4 bytes per period —
/// start hour, start minute, end hour, end minute.
impl From<&DbPeriods> for Vec<u8> {
    fn from(periods: &DbPeriods) -> Vec<u8> {
        let mut bytes = Vec::with_capacity(periods.0.len() * 4);
        for period in &periods.0 {
            bytes.push(period.start.hour() as u8);
            bytes.push(period.start.minute() as u8);
            bytes.push(period.end.hour() as u8);
            bytes.push(period.end.minute() as u8);
        }
        bytes
    }
}
/// Unpack the BLOB wire format (4 bytes per period: start h/m, end h/m).
/// Trailing bytes that do not form a full 4-byte group are ignored,
/// matching the writer in `From<&DbPeriods>`.
impl From<Vec<u8>> for DbPeriods {
    fn from(value: Vec<u8>) -> Self {
        let periods = value
            .chunks_exact(4)
            .map(|quad| Period {
                start: NaiveTime::from_hms_opt(quad[0] as u32, quad[1] as u32, 0)
                    .expect("Failed to parse period start time from database"),
                end: NaiveTime::from_hms_opt(quad[2] as u32, quad[3] as u32, 0)
                    .expect("Failed to parse period end time from database"),
            })
            .collect();
        DbPeriods(periods)
    }
}

View file

@ -1,177 +0,0 @@
use std::ops::DerefMut;
use serde_derive::{Deserialize, Serialize};
use sqlx::pool::PoolConnection;
use sqlx::Sqlite;
use crate::db::{DbController, DbJunctionRelaySchedule, DbJunctionTag, DbSchedule, DbTag};
use crate::errors::DatabaseError;
use crate::types::Weekday;
use crate::utils;
/// One row of the `relays` table. `number` is the relay's position on its
/// controller; internal row ids are not exposed over the API.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DbRelay {
    #[serde(skip)]
    pub id: i64,
    pub name: String,
    pub number: i64, // position of the relay on its controller
    #[serde(skip)]
    pub controller_id: i64, // FK -> controllers.id
}
impl DbRelay {
    /// List every relay in the database.
    pub async fn get_all(conn: &mut PoolConnection<Sqlite>) -> Result<Vec<DbRelay>, DatabaseError> {
        sqlx::query_as!(DbRelay, "SELECT * FROM relays")
            .fetch_all(conn.deref_mut())
            .await
            .map_err(DatabaseError::from)
    }
    /// Look up a relay by primary key; `None` when absent.
    pub async fn get(
        conn: &mut PoolConnection<Sqlite>,
        id: i64,
    ) -> Result<Option<DbRelay>, DatabaseError> {
        sqlx::query_as!(DbRelay, "SELECT * FROM relays WHERE id = ?", id)
            .fetch_optional(conn.deref_mut())
            .await
            .map_err(DatabaseError::from)
    }
    /// Look up a relay by its controller and position number.
    pub async fn get_by_controller_and_num(
        conn: &mut PoolConnection<Sqlite>,
        controller: &DbController,
        number: i64,
    ) -> Result<Option<DbRelay>, DatabaseError> {
        sqlx::query_as!(
            DbRelay,
            "SELECT * FROM relays WHERE controller_id = ? AND number = ?",
            controller.id,
            number
        )
        .fetch_optional(conn.deref_mut())
        .await
        .map_err(DatabaseError::from)
    }
    /// Like `get_by_controller_and_num`, but creates the relay (named
    /// `new_name`) when missing. The bool flags whether it was created.
    pub async fn get_by_controller_and_num_or_create(
        conn: &mut PoolConnection<Sqlite>,
        controller: &DbController,
        number: i64,
        new_name: &str,
    ) -> Result<(DbRelay, bool), DatabaseError> {
        match DbRelay::get_by_controller_and_num(conn, controller, number).await? {
            Some(relay) => Ok((relay, false)),
            None => {
                let relay = DbRelay::create(conn, new_name, number, controller).await?;
                Ok((relay, true))
            }
        }
    }
    /// All relays linked to `tag` via the `junction_tag` table.
    pub async fn get_by_tag(
        conn: &mut PoolConnection<Sqlite>,
        tag: &DbTag,
    ) -> Result<Vec<DbRelay>, DatabaseError> {
        sqlx::query_as!(DbRelay, "SELECT relay.* FROM relays AS relay INNER JOIN junction_tag ON junction_tag.relay_id = relay.id WHERE junction_tag.tag_id = ?", tag.id)
            .fetch_all(conn.deref_mut())
            .await
            .map_err(DatabaseError::from)
    }
    /// Insert a new relay row and return it (`RETURNING *`).
    pub async fn create(
        conn: &mut PoolConnection<Sqlite>,
        new_name: &str,
        new_number: i64,
        new_controller: &DbController,
    ) -> Result<DbRelay, DatabaseError> {
        sqlx::query_as!(
            DbRelay,
            "INSERT INTO relays (name, number, controller_id) VALUES (?, ?, ?) RETURNING *",
            new_name,
            new_number,
            new_controller.id,
        )
        .fetch_optional(conn.deref_mut())
        .await?
        .ok_or(DatabaseError::InsertGetError)
    }
    /// Delete this relay; `DeleteError` when no row was affected.
    pub async fn delete(&self, conn: &mut PoolConnection<Sqlite>) -> Result<(), DatabaseError> {
        sqlx::query!("DELETE FROM relays WHERE id = ?", self.id)
            .execute(conn.deref_mut())
            .await
            .map(|res| match res.rows_affected() {
                0 => Err(DatabaseError::DeleteError),
                _ => Ok(()),
            })?
    }
    /// Rename this relay and return the freshly reloaded row.
    pub async fn update(
        &self,
        conn: &mut PoolConnection<Sqlite>,
        new_name: &str,
    ) -> Result<DbRelay, DatabaseError> {
        sqlx::query!("UPDATE relays SET name = ? WHERE id = ?", new_name, self.id,)
            .execute(conn.deref_mut())
            .await?;
        DbRelay::get(conn, self.id)
            .await?
            .ok_or(DatabaseError::UpdateGetError)
    }
    /// Resolve the owning controller (`NotFound` if dangling).
    pub async fn get_controller(
        &self,
        conn: &mut PoolConnection<Sqlite>,
    ) -> Result<DbController, DatabaseError> {
        DbController::get(conn, self.controller_id)
            .await?
            .ok_or(DatabaseError::NotFound)
    }
    /// Tag names attached to this relay.
    pub async fn get_tags(
        &self,
        conn: &mut PoolConnection<Sqlite>,
    ) -> Result<Vec<String>, DatabaseError> {
        sqlx::query_scalar!("SELECT tag FROM tags INNER JOIN junction_tag ON junction_tag.tag_id = tags.id WHERE junction_tag.relay_id = ?", self.id)
            .fetch_all(conn.deref_mut())
            .await
            .map_err(DatabaseError::from)
    }
    /// Replace this relay's tag set (tags are created on demand).
    pub async fn set_tags(
        &self,
        conn: &mut PoolConnection<Sqlite>,
        new_tags: &[String],
    ) -> Result<(), DatabaseError> {
        // Drop all current links, then relink from scratch.
        sqlx::query!("DELETE FROM junction_tag WHERE relay_id = ?", self.id)
            .execute(conn.deref_mut())
            .await?;
        for new_tag in new_tags {
            let tag: DbTag = DbTag::get_by_tag_or_create(conn, new_tag).await?;
            DbJunctionTag::link_relay(conn, &tag, self).await?;
        }
        Ok(())
    }
    /// Re-read this relay from the database (`NotFound` if it vanished).
    pub async fn reload(
        &self,
        conn: &mut PoolConnection<Sqlite>,
    ) -> Result<DbRelay, DatabaseError> {
        Self::get(conn, self.id)
            .await?
            .ok_or(DatabaseError::NotFound)
    }
    /// The schedule assigned to this relay for the current weekday.
    pub async fn get_active_schedule(
        &self,
        conn: &mut PoolConnection<Sqlite>,
    ) -> Result<DbSchedule, DatabaseError> {
        let weekday = utils::get_weekday();
        DbJunctionRelaySchedule::get_schedule(conn, self, weekday as Weekday)
            .await?
            .ok_or(DatabaseError::NotFound)
    }
}

View file

@ -1,209 +0,0 @@
use std::borrow::Borrow;
use std::ops::DerefMut;
use chrono::NaiveTime;
use serde_derive::{Deserialize, Serialize};
use sqlx::pool::PoolConnection;
use sqlx::Sqlite;
use crate::db::model_utils::Period;
use crate::db::{DbJunctionTag, DbTag};
use crate::errors::DatabaseError;
use crate::types::ScheduleUid;
/// One row of the `schedules` table. The public identifier is `uid`
/// (exposed as "id" over the API); the numeric row id stays internal.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DbSchedule {
    #[serde(skip)]
    pub id: i64,
    #[serde(rename = "id")]
    pub uid: ScheduleUid,
    pub name: String,
    pub periods: DbPeriods, // stored as a packed BLOB (see model_utils)
}
/// Newtype over a list of `Period`s so it can carry the sqlx
/// Type/Encode/Decode impls for BLOB storage.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct DbPeriods(pub Vec<Period>);
impl DbSchedule {
    /// List every schedule in the database.
    pub async fn get_all(
        conn: &mut PoolConnection<Sqlite>,
    ) -> Result<Vec<DbSchedule>, DatabaseError> {
        sqlx::query_as!(DbSchedule, "SELECT * FROM schedules")
            .fetch_all(conn.deref_mut())
            .await
            .map_err(DatabaseError::from)
    }
    /// Look up a schedule by primary key; `None` when absent.
    pub async fn get(
        conn: &mut PoolConnection<Sqlite>,
        id: i64,
    ) -> Result<Option<DbSchedule>, DatabaseError> {
        sqlx::query_as!(DbSchedule, "SELECT * FROM schedules WHERE id = ?", id)
            .fetch_optional(conn.deref_mut())
            .await
            .map_err(DatabaseError::from)
    }
    /// Look up a schedule by its public uid; `None` when absent.
    pub async fn get_by_uid(
        conn: &mut PoolConnection<Sqlite>,
        filter_uid: &ScheduleUid,
    ) -> Result<Option<DbSchedule>, DatabaseError> {
        sqlx::query_as!(
            DbSchedule,
            "SELECT * FROM schedules WHERE uid = ?",
            filter_uid
        )
        .fetch_optional(conn.deref_mut())
        .await
        .map_err(DatabaseError::from)
    }
    /// All schedules linked to `tag` via the `junction_tag` table.
    pub async fn get_by_tag(
        conn: &mut PoolConnection<Sqlite>,
        tag: &DbTag,
    ) -> Result<Vec<DbSchedule>, DatabaseError> {
        sqlx::query_as!(DbSchedule, "SELECT schedule.* FROM schedules AS schedule INNER JOIN junction_tag ON junction_tag.schedule_id = schedule.id WHERE junction_tag.tag_id = ?", tag.id)
            .fetch_all(conn.deref_mut())
            .await
            .map_err(DatabaseError::from)
    }
    /// Delete a schedule by uid. The built-in "on"/"off" schedules are
    /// protected; `NotFound` when the uid does not exist.
    pub async fn delete_by_uid(
        conn: &mut PoolConnection<Sqlite>,
        filter_uid: ScheduleUid,
    ) -> Result<(), DatabaseError> {
        // Refuse to delete the two protected system schedules.
        let filter_uid = match filter_uid {
            ScheduleUid::Off => Err(DatabaseError::Protected),
            ScheduleUid::On => Err(DatabaseError::Protected),
            ScheduleUid::Any(_) => Ok(filter_uid),
        }?;
        // Distinguish "not found" from "delete failed".
        if sqlx::query_scalar!("SELECT 1 FROM schedules WHERE uid = ?", filter_uid)
            .fetch_optional(conn.deref_mut())
            .await?
            .is_none()
        {
            return Err(DatabaseError::NotFound);
        }
        sqlx::query!("DELETE FROM schedules WHERE uid = ?", filter_uid)
            .execute(conn.deref_mut())
            .await
            .map(|res| match res.rows_affected() {
                0 => Err(DatabaseError::DeleteError),
                _ => Ok(()),
            })?
    }
    /// Insert a new schedule row and return it (`RETURNING *`).
    pub async fn create(
        conn: &mut PoolConnection<Sqlite>,
        new_uid: ScheduleUid,
        new_name: &str,
        new_periods: &DbPeriods,
    ) -> Result<DbSchedule, DatabaseError> {
        sqlx::query_as!(
            DbSchedule,
            "INSERT INTO schedules (uid, name, periods) VALUES (?, ?, ?) RETURNING *",
            new_uid,
            new_name,
            new_periods,
        )
        .fetch_optional(conn.deref_mut())
        .await?
        .ok_or(DatabaseError::InsertGetError)
    }
    /// Fetch by uid, creating the schedule when missing. The bool flags
    /// whether it was created.
    pub async fn get_by_uid_or_create(
        conn: &mut PoolConnection<Sqlite>,
        uid: ScheduleUid,
        name: &str,
        periods: &DbPeriods,
    ) -> Result<(DbSchedule, bool), DatabaseError> {
        match DbSchedule::get_by_uid(conn, &uid).await? {
            Some(schedule) => Ok((schedule, false)),
            None => {
                let schedule = DbSchedule::create(conn, uid, name, periods).await?;
                Ok((schedule, true))
            }
        }
    }
    /// The protected "always on" schedule, created on first use.
    pub async fn get_on(conn: &mut PoolConnection<Sqlite>) -> Result<DbSchedule, DatabaseError> {
        if let Some(schedule) = DbSchedule::get_by_uid(conn, &ScheduleUid::On).await? {
            return Ok(schedule);
        }
        let periods = DbPeriods(vec![Period::new_on()]);
        Self::create(conn, ScheduleUid::On, "On", &periods).await
    }
    /// The protected "always off" schedule (no periods), created on first use.
    pub async fn get_off(conn: &mut PoolConnection<Sqlite>) -> Result<DbSchedule, DatabaseError> {
        if let Some(schedule) = DbSchedule::get_by_uid(conn, &ScheduleUid::Off).await? {
            return Ok(schedule);
        }
        let periods = DbPeriods(vec![]);
        Self::create(conn, ScheduleUid::Off, "Off", &periods).await
    }
    /// Update name and periods, returning the reloaded row. For the
    /// protected on/off schedules only the name can change.
    pub async fn update(
        &self,
        conn: &mut PoolConnection<Sqlite>,
        new_name: &str,
        new_periods: &DbPeriods,
    ) -> Result<DbSchedule, DatabaseError> {
        // overwrite periods on protected schedules
        let new_periods = match self.uid {
            ScheduleUid::Off | ScheduleUid::On => self.periods.borrow(),
            ScheduleUid::Any(_) => new_periods,
        };
        sqlx::query!(
            "UPDATE schedules SET name = ?, periods = ? WHERE id = ?",
            new_name,
            new_periods,
            self.id,
        )
        .execute(conn.deref_mut())
        .await?;
        DbSchedule::get(conn, self.id)
            .await?
            .ok_or(DatabaseError::UpdateGetError)
    }
    /// Tag names attached to this schedule.
    pub async fn get_tags(
        &self,
        conn: &mut PoolConnection<Sqlite>,
    ) -> Result<Vec<String>, DatabaseError> {
        Ok(sqlx::query_scalar!("SELECT tag FROM tags INNER JOIN junction_tag ON junction_tag.tag_id = tags.id WHERE junction_tag.schedule_id = ?", self.id)
            .fetch_all(conn.deref_mut())
            .await?)
    }
    /// Replace this schedule's tag set (tags are created on demand).
    pub async fn set_tags(
        &self,
        conn: &mut PoolConnection<Sqlite>,
        new_tags: &[String],
    ) -> Result<(), DatabaseError> {
        sqlx::query!("DELETE FROM junction_tag WHERE schedule_id = ?", self.id)
            .execute(conn.deref_mut())
            .await?;
        for new_tag in new_tags {
            let tag: DbTag = DbTag::get_by_tag_or_create(conn, new_tag).await?;
            DbJunctionTag::link_schedule(conn, &tag, self).await?;
        }
        Ok(())
    }
    /// True when any period of this schedule covers `now`.
    pub fn is_on(&self, now: &NaiveTime) -> bool {
        self.periods.0.iter().any(|period| period.is_on(now))
    }
    /// Earliest upcoming period boundary after `now`, across all periods.
    pub fn get_next_time(&self, now: &NaiveTime) -> Option<NaiveTime> {
        self.periods
            .0
            .iter()
            .filter_map(|period| period.get_next_time(now))
            .min()
    }
}

View file

@ -1,91 +0,0 @@
use std::ops::DerefMut;
use serde_derive::Serialize;
use sqlx::pool::PoolConnection;
use sqlx::Sqlite;
use crate::errors::DatabaseError;
/// One row of the `tags` table: a free-form label that relays and
/// schedules link to through `junction_tag`.
#[derive(Debug, Serialize, Clone)]
pub struct DbTag {
    pub id: i64,
    pub tag: String,
}
impl DbTag {
    /// Insert a new tag; rejects the empty string.
    pub async fn create(
        conn: &mut PoolConnection<Sqlite>,
        new_tag: &str,
    ) -> Result<DbTag, DatabaseError> {
        if new_tag.is_empty() {
            return Err(DatabaseError::EmptyDataInsert);
        }
        sqlx::query_as!(
            DbTag,
            "INSERT INTO tags (tag) VALUES (?) RETURNING *",
            new_tag
        )
        .fetch_optional(conn.deref_mut())
        .await?
        .ok_or(DatabaseError::InsertGetError)
    }
    /// List every tag in the database.
    pub async fn get_all(conn: &mut PoolConnection<Sqlite>) -> Result<Vec<DbTag>, DatabaseError> {
        sqlx::query_as!(DbTag, "SELECT * FROM tags")
            .fetch_all(conn.deref_mut())
            .await
            .map_err(DatabaseError::from)
    }
    /// Look up a tag by primary key; `None` when absent.
    pub async fn get(
        conn: &mut PoolConnection<Sqlite>,
        id: i64,
    ) -> Result<Option<DbTag>, DatabaseError> {
        sqlx::query_as!(DbTag, "SELECT * FROM tags WHERE id = ?", id)
            .fetch_optional(conn.deref_mut())
            .await
            .map_err(DatabaseError::from)
    }
    /// Fetch a tag by name, creating it when missing.
    pub async fn get_by_tag_or_create(
        conn: &mut PoolConnection<Sqlite>,
        target_tag: &str,
    ) -> Result<DbTag, DatabaseError> {
        match DbTag::get_by_tag(conn, target_tag).await? {
            Some(tag) => Ok(tag),
            None => DbTag::create(conn, target_tag).await,
        }
    }
    /// Look up a tag by name; `None` when absent.
    pub async fn get_by_tag(
        conn: &mut PoolConnection<Sqlite>,
        target_tag: &str,
    ) -> Result<Option<DbTag>, DatabaseError> {
        sqlx::query_as!(DbTag, "SELECT * FROM tags WHERE tag = ?", target_tag)
            .fetch_optional(conn.deref_mut())
            .await
            .map_err(DatabaseError::from)
    }
    /// Delete a tag by name; `NotFound` when it does not exist,
    /// `DeleteError` when the delete affected no rows.
    pub async fn delete_by_tag(
        conn: &mut PoolConnection<Sqlite>,
        filter_tag: &str,
    ) -> Result<(), DatabaseError> {
        if sqlx::query_scalar!("SELECT 1 FROM tags WHERE tag = ?", filter_tag)
            .fetch_optional(conn.deref_mut())
            .await?
            .is_none()
        {
            return Err(DatabaseError::NotFound);
        }
        sqlx::query!("DELETE FROM tags WHERE tag = ?", filter_tag)
            .execute(conn.deref_mut())
            .await
            .map(|res| match res.rows_affected() {
                0 => Err(DatabaseError::DeleteError),
                _ => Ok(()),
            })?
    }
}

View file

@ -1,35 +0,0 @@
use rppal::gpio::{Gpio, OutputPin};
use crate::drivers::RelayDriver;
use crate::errors::EmgauwaError;
/// Relay driver backed by a plain GPIO output pin (via rppal).
pub struct GpioDriver {
    pub gpio: OutputPin,
    pub inverted: bool, // true when the relay is active-low
}
impl GpioDriver {
    /// Claim BCM pin `pin` as an output. Fails when the GPIO peripheral
    /// or the pin is unavailable.
    pub fn new(pin: u8, inverted: bool) -> Result<Self, EmgauwaError> {
        let gpio = Gpio::new()?.get(pin)?.into_output();
        Ok(Self { gpio, inverted })
    }
}
impl RelayDriver for GpioDriver {
    /// Drive the pin; `get_high` applies the inversion flag to `value`.
    fn set(&mut self, value: bool) -> Result<(), EmgauwaError> {
        if self.get_high(value) {
            self.gpio.set_high();
        } else {
            self.gpio.set_low();
        }
        Ok(())
    }
    fn get_pin(&self) -> u8 {
        self.gpio.pin()
    }
    fn get_inverted(&self) -> bool {
        self.inverted
    }
}

View file

@ -1,19 +0,0 @@
mod gpio;
mod null;
mod piface;
pub use gpio::GpioDriver;
pub use null::NullDriver;
pub use piface::PiFaceDriver;
use crate::errors::EmgauwaError;
/// Common interface for relay output backends (GPIO, PiFace, null).
pub trait RelayDriver {
    /// Translate a logical on/off into the electrical level, honoring
    /// the driver's inversion flag (XOR).
    fn get_high(&self, value: bool) -> bool {
        value ^ self.get_inverted()
    }
    /// Drive the output to the logical state `value`.
    fn set(&mut self, value: bool) -> Result<(), EmgauwaError>;
    /// The pin number this driver controls.
    fn get_pin(&self) -> u8;
    /// Whether the output is wired active-low.
    fn get_inverted(&self) -> bool;
}

View file

@ -1,26 +0,0 @@
use crate::drivers::RelayDriver;
use crate::errors::EmgauwaError;
/// No-op relay driver for configurations without real hardware; remembers
/// the pin number but discards writes.
pub struct NullDriver {
    pub pin: u8,
}
impl NullDriver {
    pub fn new(pin: u8) -> Self {
        Self { pin }
    }
}
impl RelayDriver for NullDriver {
    /// Writes are silently discarded.
    fn set(&mut self, _value: bool) -> Result<(), EmgauwaError> {
        Ok(())
    }
    fn get_pin(&self) -> u8 {
        self.pin
    }
    fn get_inverted(&self) -> bool {
        false
    }
}

View file

@ -1,52 +0,0 @@
use rppal_pfd::{
ChipSelect, HardwareAddress, OutputPin, PiFaceDigital, PiFaceDigitalError, SpiBus, SpiMode,
};
use crate::drivers::RelayDriver;
use crate::errors::EmgauwaError;
/// Relay driver backed by an output pin of a PiFace Digital expansion
/// board (SPI, via rppal-pfd).
pub struct PiFaceDriver {
    pub pfd_pin: OutputPin,
}
impl PiFaceDriver {
    /// Claim output pin `pin` on an already-initialized PiFaceDigital.
    /// Errors when `pfd` is `None` (board not initialized).
    pub fn new(pin: u8, pfd: &Option<PiFaceDigital>) -> Result<Self, EmgauwaError> {
        let pfd = pfd.as_ref().ok_or(EmgauwaError::Hardware(String::from(
            "PiFaceDigital not initialized",
        )))?;
        let pfd_pin = pfd.get_output_pin(pin)?;
        Ok(Self { pfd_pin })
    }
    /// Open and initialize the PiFace board at hardware address 0 on
    /// SPI0/CS0 at 100 kHz, SPI mode 0.
    pub fn init_piface() -> Result<PiFaceDigital, EmgauwaError> {
        let mut pfd = PiFaceDigital::new(
            HardwareAddress::new(0)?,
            SpiBus::Spi0,
            ChipSelect::Cs0,
            100_000,
            SpiMode::Mode0,
        )?;
        pfd.init()?;
        Ok(pfd)
    }
}
impl RelayDriver for PiFaceDriver {
    /// Drive the PiFace output; errors are funneled through
    /// `PiFaceDigitalError` into `EmgauwaError::Hardware`.
    fn set(&mut self, value: bool) -> Result<(), EmgauwaError> {
        if self.get_high(value) {
            self.pfd_pin.set_high().map_err(PiFaceDigitalError::from)?;
        } else {
            self.pfd_pin.set_low().map_err(PiFaceDigitalError::from)?;
        }
        Ok(())
    }
    fn get_pin(&self) -> u8 {
        self.pfd_pin.get_pin_number()
    }
    fn get_inverted(&self) -> bool {
        false
    }
}

View file

@ -1,22 +0,0 @@
use actix_web::http::StatusCode;
/// Errors raised by API-level validation (as opposed to database or
/// hardware failures).
#[derive(Debug)]
pub enum ApiError {
    ProtectedSchedule, // attempt to modify/delete a built-in schedule
}
impl ApiError {
    /// HTTP status code this error maps to.
    pub fn get_code(&self) -> StatusCode {
        match self {
            ApiError::ProtectedSchedule => StatusCode::FORBIDDEN,
        }
    }
}
// Human-readable message for the error response body.
impl From<&ApiError> for String {
    fn from(err: &ApiError) -> Self {
        match err {
            ApiError::ProtectedSchedule => String::from("the targeted schedule is protected"),
        }
    }
}

View file

@ -1,85 +0,0 @@
use actix_web::http::StatusCode;
use actix_web::HttpResponse;
use serde::ser::SerializeStruct;
use serde::{Serialize, Serializer};
use sqlx::migrate::MigrateError;
use sqlx::Error;
/// Errors produced by the database layer. Serializes into a JSON body of
/// the form `{"type": "database-error", "code": ..., "description": ...}`.
#[derive(Debug)]
pub enum DatabaseError {
    DeleteError,
    InsertError,
    InsertGetError, // insert succeeded but RETURNING row was missing
    NotFound,
    Protected, // operation on a protected (built-in) model
    EmptyDataInsert,
    UpdateError,
    UpdateGetError, // update succeeded but re-fetch failed
    MigrationError(MigrateError),
    Unknown(Error),
}
impl DatabaseError {
    /// HTTP status code this error maps to (500 unless a more specific
    /// mapping exists).
    pub fn get_code(&self) -> StatusCode {
        match self {
            DatabaseError::NotFound => StatusCode::NOT_FOUND,
            DatabaseError::Protected => StatusCode::FORBIDDEN,
            _ => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
}
impl Serialize for DatabaseError {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut s = serializer.serialize_struct("error", 3)?;
        s.serialize_field("type", "database-error")?;
        s.serialize_field("code", &self.get_code().as_u16())?;
        s.serialize_field("description", &String::from(self))?;
        s.end()
    }
}
// Human-readable message for the error response body.
impl From<&DatabaseError> for String {
    fn from(err: &DatabaseError) -> Self {
        String::from(match err {
            DatabaseError::InsertError => "error on inserting into database",
            DatabaseError::InsertGetError => {
                "error on retrieving new entry from database (your entry was saved)"
            }
            DatabaseError::NotFound => "model was not found in database",
            DatabaseError::DeleteError => "error on deleting from database",
            DatabaseError::Protected => "model is protected",
            DatabaseError::UpdateError => "error on updating the model",
            DatabaseError::UpdateGetError => {
                "error on retrieving updated model from database (your entry was saved)"
            }
            DatabaseError::MigrationError(_) => "error on running migrations",
            DatabaseError::Unknown(_) => "unknown error",
            DatabaseError::EmptyDataInsert => "tried to insert empty data",
        })
    }
}
impl From<DatabaseError> for HttpResponse {
    fn from(err: DatabaseError) -> Self {
        HttpResponse::build(err.get_code()).json(err)
    }
}
// sqlx's RowNotFound becomes our NotFound; everything else stays opaque.
impl From<Error> for DatabaseError {
    fn from(value: Error) -> Self {
        match value {
            Error::RowNotFound => DatabaseError::NotFound,
            _ => DatabaseError::Unknown(value),
        }
    }
}
impl From<MigrateError> for DatabaseError {
    fn from(value: MigrateError) -> Self {
        Self::MigrationError(value)
    }
}

View file

@ -1,161 +0,0 @@
use std::error::Error;
use std::fmt::{Debug, Display, Formatter};
use std::io::ErrorKind;
use actix::MailboxError;
use actix_web::http::StatusCode;
use actix_web::HttpResponse;
use config::ConfigError;
use rppal::gpio;
use rppal_mcp23s17::Mcp23s17Error;
use rppal_pfd::PiFaceDigitalError;
use serde::ser::SerializeStruct;
use serde::{Serialize, Serializer};
use crate::errors::{ApiError, DatabaseError};
use crate::types::EmgauwaUid;
/// Top-level application error, aggregating API, database, serialization,
/// connection and hardware failures into one HTTP-mappable type.
#[derive(Debug)]
pub enum EmgauwaError {
    Api(ApiError),
    Uid(uuid::Error),          // malformed uid in a request
    Serialization(serde_json::Error),
    Database(DatabaseError),
    Other(String),
    Internal(String),
    Connection(EmgauwaUid),    // target controller is not connected
    Hardware(String),          // GPIO / PiFace failures
}
impl EmgauwaError {
    /// HTTP status code this error maps to.
    fn get_code(&self) -> StatusCode {
        match self {
            EmgauwaError::Api(err) => err.get_code(),
            EmgauwaError::Serialization(_) => StatusCode::INTERNAL_SERVER_ERROR,
            EmgauwaError::Database(err) => err.get_code(),
            EmgauwaError::Uid(_) => StatusCode::BAD_REQUEST,
            EmgauwaError::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR,
            EmgauwaError::Connection(_) => StatusCode::GATEWAY_TIMEOUT,
            EmgauwaError::Other(_) => StatusCode::INTERNAL_SERVER_ERROR,
            EmgauwaError::Hardware(_) => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
}
// Human-readable message for the error response body.
impl From<&EmgauwaError> for String {
    fn from(err: &EmgauwaError) -> Self {
        match err {
            EmgauwaError::Api(err) => String::from(err),
            EmgauwaError::Serialization(_) => String::from("error during (de-)serialization"),
            EmgauwaError::Database(err) => String::from(err),
            EmgauwaError::Uid(_) => String::from("the uid is in a bad format"),
            EmgauwaError::Internal(_) => String::from("internal error"),
            EmgauwaError::Connection(_) => String::from("the target controller is not connected"),
            EmgauwaError::Other(err) => format!("other error: {}", err),
            EmgauwaError::Hardware(err) => format!("hardware error: {}", err),
        }
    }
}
// Conversions so `?` works across the error layers.
impl From<ApiError> for EmgauwaError {
    fn from(value: ApiError) -> Self {
        EmgauwaError::Api(value)
    }
}
impl From<DatabaseError> for EmgauwaError {
    fn from(value: DatabaseError) -> Self {
        EmgauwaError::Database(value)
    }
}
impl From<serde_json::Error> for EmgauwaError {
    fn from(value: serde_json::Error) -> Self {
        EmgauwaError::Serialization(value)
    }
}
impl From<sqlx::Error> for EmgauwaError {
    fn from(value: sqlx::Error) -> Self {
        EmgauwaError::Database(DatabaseError::from(value))
    }
}
impl From<uuid::Error> for EmgauwaError {
    fn from(value: uuid::Error) -> Self {
        EmgauwaError::Uid(value)
    }
}
impl From<MailboxError> for EmgauwaError {
    fn from(value: MailboxError) -> Self {
        EmgauwaError::Internal(value.to_string())
    }
}
impl From<ConfigError> for EmgauwaError {
    fn from(value: ConfigError) -> Self {
        Self::Other(value.to_string())
    }
}
impl From<gpio::Error> for EmgauwaError {
    fn from(value: gpio::Error) -> Self {
        Self::Hardware(value.to_string())
    }
}
// Flatten the nested PiFace/MCP23S17 error sources into a Hardware string.
// NOTE(review): the two arms of the inner match both stringify; the first
// binds the SPI sub-source, the wildcard uses the outer MCP source.
impl From<PiFaceDigitalError> for EmgauwaError {
    fn from(value: PiFaceDigitalError) -> Self {
        match value {
            PiFaceDigitalError::Mcp23s17Error { source } => match source {
                Mcp23s17Error::SpiError { source } => Self::Hardware(source.to_string()),
                _ => Self::Hardware(source.to_string()),
            },
            PiFaceDigitalError::GpioError { source } => Self::Hardware(source.to_string()),
            _ => Self::Hardware(value.to_string()),
        }
    }
}
// Render as an HTTP response with a JSON body (code + description).
impl From<&EmgauwaError> for HttpResponse {
    fn from(err: &EmgauwaError) -> Self {
        HttpResponse::build(err.get_code()).json(err)
    }
}
impl Error for EmgauwaError {}
// Allows returning EmgauwaError from fns declared `-> std::io::Result<_>`
// (e.g. actix server entry points).
impl From<EmgauwaError> for std::io::Error {
    fn from(value: EmgauwaError) -> Self {
        std::io::Error::new(ErrorKind::Other, value)
    }
}
impl Serialize for EmgauwaError {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut s = serializer.serialize_struct("error", 2)?;
        s.serialize_field("code", &self.get_code().as_u16())?;
        s.serialize_field("description", &String::from(self))?;
        s.end()
    }
}
impl Display for EmgauwaError {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}: {}", self.get_code(), String::from(self))
    }
}
// actix-web integration: status code + JSON body from the impls above.
impl actix_web::error::ResponseError for EmgauwaError {
    fn status_code(&self) -> StatusCode {
        self.get_code()
    }
    fn error_response(&self) -> HttpResponse {
        HttpResponse::from(self)
    }
}

View file

@ -1,7 +0,0 @@
mod api_error;
mod database_error;
mod emgauwa_error;
pub use api_error::ApiError;
pub use database_error::DatabaseError;
pub use emgauwa_error::EmgauwaError;

View file

@ -1,8 +0,0 @@
pub mod constants;
pub mod db;
pub mod drivers;
pub mod errors;
pub mod models;
pub mod settings;
pub mod types;
pub mod utils;

View file

@ -1,86 +0,0 @@
use std::time::Instant;
use actix::MessageResponse;
use chrono::NaiveTime;
use futures::executor::block_on;
use serde_derive::{Deserialize, Serialize};
use sqlx::pool::PoolConnection;
use sqlx::Sqlite;
use crate::db::DbController;
use crate::errors::{DatabaseError, EmgauwaError};
use crate::models::{convert_db_list_cache, FromDbModel, Relay};
use crate::types::RelayStates;
/// API model: a controller row plus its relays, serialized as one flat
/// JSON object (`c` is flattened). Also usable as an actix message reply.
#[derive(Serialize, Deserialize, Debug, Clone, MessageResponse)]
pub struct Controller {
    #[serde(flatten)]
    pub c: DbController,
    pub relays: Vec<Relay>,
}
impl FromDbModel for Controller {
    type DbModel = DbController;
    // The cache is the already-converted relay list, so sibling
    // conversions can share it.
    type DbModelCache = Vec<Relay>;
    fn from_db_model(
        conn: &mut PoolConnection<Sqlite>,
        db_model: Self::DbModel,
    ) -> Result<Self, DatabaseError> {
        // block_on: this conversion API is synchronous but the DB layer
        // is async.
        let relays_db = block_on(db_model.get_relays(conn))?;
        let cache = convert_db_list_cache(conn, relays_db, db_model.clone())?;
        Self::from_db_model_cache(conn, db_model, cache)
    }
    fn from_db_model_cache(
        _conn: &mut PoolConnection<Sqlite>,
        db_model: Self::DbModel,
        cache: Self::DbModelCache,
    ) -> Result<Self, DatabaseError> {
        Ok(Controller {
            c: db_model,
            relays: cache,
        })
    }
}
impl Controller {
    /// Re-read the controller row and all of its relays from the database.
    pub fn reload(&mut self, conn: &mut PoolConnection<Sqlite>) -> Result<(), EmgauwaError> {
        self.c = block_on(self.c.reload(conn))?;
        for relay in &mut self.relays {
            relay.reload(conn)?;
        }
        Ok(())
    }
    /// Copy reported on/off states onto the relay models, pairing by
    /// position (zip stops at the shorter of the two lists).
    pub fn apply_relay_states(&mut self, relay_states: &RelayStates) {
        self.relays
            .iter_mut()
            .zip(relay_states.iter())
            .for_each(|(relay, is_on)| {
                relay.is_on = *is_on;
            });
    }
    /// Snapshot of the current relay on/off states, in relay order.
    pub fn get_relay_states(&self) -> RelayStates {
        self.relays.iter().map(|r| r.is_on).collect()
    }
    /// Earliest upcoming schedule boundary after `now` across all relays.
    pub fn get_next_time(&self, now: &NaiveTime) -> Option<NaiveTime> {
        self.relays
            .iter()
            .filter_map(|r| r.active_schedule.get_next_time(now))
            .min()
    }
    /// Mark relay number `relay_num` as pulsing until `until`.
    /// Errors when no relay with that number exists.
    pub fn relay_pulse(&mut self, relay_num: i64, until: Instant) -> Result<(), EmgauwaError> {
        let relay = self
            .relays
            .iter_mut()
            .find(|r| r.r.number == relay_num)
            .ok_or(EmgauwaError::Other(String::from("Relay not found")))?;
        relay.pulsing = Some(until);
        Ok(())
    }
}

View file

@ -1,42 +0,0 @@
use futures::executor::block_on;
use serde_derive::{Deserialize, Serialize};
use sqlx::pool::PoolConnection;
use sqlx::Sqlite;
use crate::db::DbMacro;
use crate::errors::DatabaseError;
use crate::models::{convert_db_list, FromDbModel, MacroAction};
/// API model: a macro row plus its resolved actions, serialized as one
/// flat JSON object (`m` is flattened).
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Macro {
    #[serde(flatten)]
    pub m: DbMacro,
    pub actions: Vec<MacroAction>,
}
impl FromDbModel for Macro {
    type DbModel = DbMacro;
    type DbModelCache = (); // nothing shareable between conversions
    fn from_db_model(
        conn: &mut PoolConnection<Sqlite>,
        db_model: Self::DbModel,
    ) -> Result<Self, DatabaseError> {
        Self::from_db_model_cache(conn, db_model, ())
    }
    fn from_db_model_cache(
        conn: &mut PoolConnection<Sqlite>,
        db_model: Self::DbModel,
        _cache: Self::DbModelCache,
    ) -> Result<Self, DatabaseError> {
        // block_on: sync conversion API over the async DB layer.
        let actions_db = block_on(db_model.get_actions(conn))?;
        let actions: Vec<MacroAction> = convert_db_list(conn, actions_db)?;
        Ok(Macro {
            m: db_model,
            actions,
        })
    }
}

View file

@ -1,56 +0,0 @@
use futures::executor::block_on;
use serde_derive::{Deserialize, Serialize};
use sqlx::pool::PoolConnection;
use sqlx::Sqlite;
use crate::db::{DbJunctionRelaySchedule, DbMacroAction};
use crate::errors::{DatabaseError, EmgauwaError};
use crate::models::{FromDbModel, Relay, Schedule};
/// API model of one macro action: assign `schedule` to `relay` on
/// `weekday` when the macro is executed.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct MacroAction {
    pub schedule: Schedule,
    pub relay: Relay,
    pub weekday: i64,
}
impl FromDbModel for MacroAction {
    type DbModel = DbMacroAction;
    type DbModelCache = ();
    fn from_db_model(
        conn: &mut PoolConnection<Sqlite>,
        db_model: Self::DbModel,
    ) -> Result<Self, DatabaseError> {
        Self::from_db_model_cache(conn, db_model, ())
    }
    fn from_db_model_cache(
        conn: &mut PoolConnection<Sqlite>,
        db_model: Self::DbModel,
        _cache: Self::DbModelCache,
    ) -> Result<Self, DatabaseError> {
        // Resolve the FK targets into full API models.
        let schedule_db = block_on(db_model.get_schedule(conn))?;
        let schedule = Schedule::from_db_model(conn, schedule_db)?;
        let relay_db = block_on(db_model.get_relay(conn))?;
        let relay = Relay::from_db_model(conn, relay_db)?;
        let weekday = db_model.weekday;
        Ok(MacroAction {
            schedule,
            relay,
            weekday,
        })
    }
}
impl MacroAction {
    /// Applies this action: stores its schedule as the relay's schedule for
    /// `self.weekday` via the relay/schedule junction table.
    pub async fn execute(&self, conn: &mut PoolConnection<Sqlite>) -> Result<(), EmgauwaError> {
        DbJunctionRelaySchedule::set_schedule(conn, &self.relay.r, &self.schedule.s, self.weekday)
            .await?;
        Ok(())
    }
}

View file

@ -1,68 +0,0 @@
mod controller;
mod r#macro;
mod macro_action;
mod relay;
mod schedule;
mod tag;
pub use controller::Controller;
pub use macro_action::MacroAction;
pub use r#macro::Macro;
pub use relay::Relay;
pub use schedule::Schedule;
use sqlx::pool::PoolConnection;
use sqlx::Sqlite;
pub use tag::Tag;
use crate::errors::DatabaseError;
/// Conversion from a raw database row into its API model.
///
/// Implementors resolve referenced rows through `conn`; the optional
/// `DbModelCache` lets callers reuse data shared by many rows (e.g. a common
/// parent record) instead of re-fetching it per element.
pub trait FromDbModel {
    /// The raw database row type this model is built from.
    type DbModel: Clone;
    /// Precomputed shared data consumed by `from_db_model_cache`; `()` when unused.
    type DbModelCache: Clone;

    /// Builds the model from a row, fetching any required related data itself.
    fn from_db_model(
        conn: &mut PoolConnection<Sqlite>,
        db_model: Self::DbModel,
    ) -> Result<Self, DatabaseError>
    where
        Self: Sized;

    /// Builds the model from a row, using `cache` instead of re-fetching shared data.
    fn from_db_model_cache(
        conn: &mut PoolConnection<Sqlite>,
        db_model: Self::DbModel,
        cache: Self::DbModelCache,
    ) -> Result<Self, DatabaseError>
    where
        Self: Sized;
}
/// Shared driver for the two public list converters: maps every row through
/// `FromDbModel`, using `cache` when one was supplied, stopping at the first error.
fn convert_db_list_generic<T: FromDbModel>(
    conn: &mut PoolConnection<Sqlite>,
    db_models: Vec<T::DbModel>,
    cache: Option<T::DbModelCache>,
) -> Result<Vec<T>, DatabaseError> {
    // `collect` into `Result<Vec<_>, _>` short-circuits on the first `Err`,
    // matching the original early-return loop.
    db_models
        .into_iter()
        .map(|row| match &cache {
            Some(shared) => T::from_db_model_cache(conn, row, shared.clone()),
            None => T::from_db_model(conn, row),
        })
        .collect()
}
/// Converts a list of database rows, letting each element fetch its own related data.
pub fn convert_db_list<T: FromDbModel>(
    conn: &mut PoolConnection<Sqlite>,
    db_models: Vec<T::DbModel>,
) -> Result<Vec<T>, DatabaseError> {
    convert_db_list_generic(conn, db_models, None)
}

/// Converts a list of database rows, reusing `cache` for data shared by all elements.
pub fn convert_db_list_cache<T: FromDbModel>(
    conn: &mut PoolConnection<Sqlite>,
    db_models: Vec<T::DbModel>,
    cache: T::DbModelCache,
) -> Result<Vec<T>, DatabaseError> {
    convert_db_list_generic(conn, db_models, Some(cache))
}

View file

@ -1,107 +0,0 @@
use std::time::Instant;
use chrono::NaiveTime;
use futures::executor::block_on;
use serde_derive::{Deserialize, Serialize};
use sqlx::pool::PoolConnection;
use sqlx::Sqlite;
use crate::db::{DbController, DbJunctionRelaySchedule, DbRelay, DbSchedule};
use crate::errors::DatabaseError;
use crate::models::FromDbModel;
use crate::types::EmgauwaUid;
/// API model for a relay: the database row joined with its controller,
/// schedules, tags and runtime-only state.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Relay {
    // Flattened so DbRelay's fields serialize at the top level.
    #[serde(flatten)]
    pub r: DbRelay,
    pub controller: DbController,
    // Copied from `controller.uid` for direct access in the serialized form.
    pub controller_id: EmgauwaUid,
    // Schedules linked to this relay via the relay/schedule junction.
    pub schedules: Vec<DbSchedule>,
    pub active_schedule: DbSchedule,
    // Live on/off state; `None` until known (not loaded from the database).
    pub is_on: Option<bool>,
    pub tags: Vec<String>,

    // for internal use only.
    // Deadline until which a pulse keeps the relay forced; never serialized.
    #[serde(skip)]
    pub pulsing: Option<Instant>,
}
/// Hydrates a [`Relay`] from its row; the cache is the owning [`DbController`].
impl FromDbModel for Relay {
    type DbModel = DbRelay;
    type DbModelCache = DbController;

    fn from_db_model(
        conn: &mut PoolConnection<Sqlite>,
        db_model: Self::DbModel,
    ) -> Result<Self, DatabaseError> {
        // No caller-provided controller: fetch it from the relay row itself.
        let controller = block_on(db_model.get_controller(conn))?;
        Self::from_db_model_cache(conn, db_model, controller)
    }

    fn from_db_model_cache(
        conn: &mut PoolConnection<Sqlite>,
        db_model: Self::DbModel,
        cache: Self::DbModelCache,
    ) -> Result<Self, DatabaseError> {
        let tags = block_on(db_model.get_tags(conn))?;
        let schedules = block_on(DbJunctionRelaySchedule::get_schedules(conn, &db_model))?;
        let active_schedule = block_on(db_model.get_active_schedule(conn))?;
        Ok(Relay {
            // Take the uid before `cache` is moved into the struct.
            controller_id: cache.uid.clone(),
            controller: cache,
            schedules,
            active_schedule,
            // Live state is unknown until the controller reports it.
            is_on: None,
            tags,
            pulsing: None,
            r: db_model,
        })
    }
}
impl Relay {
    /// Re-reads the relay row, its linked schedules and the active schedule
    /// from the database, replacing the cached copies in `self`.
    pub fn reload(&mut self, conn: &mut PoolConnection<Sqlite>) -> Result<(), DatabaseError> {
        self.r = block_on(self.r.reload(conn))?;
        self.schedules = block_on(DbJunctionRelaySchedule::get_schedules(conn, &self.r))?;
        self.reload_active_schedule(conn)?;
        Ok(())
    }

    /// Refreshes only the cached active schedule.
    pub fn reload_active_schedule(
        &mut self,
        conn: &mut PoolConnection<Sqlite>,
    ) -> Result<(), DatabaseError> {
        self.active_schedule = block_on(self.r.get_active_schedule(conn))?;
        Ok(())
    }

    /// Whether the active schedule switches this relay on at time `now`.
    pub fn is_on(&self, now: &NaiveTime) -> bool {
        self.active_schedule.is_on(now)
    }

    /// Next switching time of the active schedule after `now`, if any.
    pub fn get_next_time(&self, now: &NaiveTime) -> Option<NaiveTime> {
        self.active_schedule.get_next_time(now)
    }

    /// Returns the pulse deadline while it is still in the future; once the
    /// deadline has passed the stored value is cleared and `None` is returned.
    pub fn check_pulsing(&mut self, now: &Instant) -> Option<Instant> {
        // `take` clears the deadline; it is re-stored only while still active.
        // (Replaces the original's verbose match with `dur_instant.lt(now)` —
        // same behavior: expired strictly-before `now` means cleared.)
        match self.pulsing.take() {
            Some(deadline) if deadline >= *now => {
                self.pulsing = Some(deadline);
                Some(deadline)
            }
            _ => None,
        }
    }
}

View file

@ -1,41 +0,0 @@
use futures::executor::block_on;
use serde_derive::{Deserialize, Serialize};
use sqlx::pool::PoolConnection;
use sqlx::Sqlite;
use crate::db::DbSchedule;
use crate::errors::DatabaseError;
use crate::models::FromDbModel;
/// API model for a schedule: the database row plus its tag names.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Schedule {
    // Flattened so DbSchedule's fields serialize at the top level.
    #[serde(flatten)]
    pub s: DbSchedule,
    pub tags: Vec<String>,
}
/// Hydrates a [`Schedule`] from its row; the cache is the schedule's tag list.
impl FromDbModel for Schedule {
    type DbModel = DbSchedule;
    type DbModelCache = Vec<String>;

    fn from_db_model(
        conn: &mut PoolConnection<Sqlite>,
        db_model: Self::DbModel,
    ) -> Result<Self, DatabaseError> {
        // Fetch the tags once, then reuse the cache-taking constructor.
        let cache = block_on(db_model.get_tags(conn))?;
        Self::from_db_model_cache(conn, db_model, cache)
    }

    fn from_db_model_cache(
        _conn: &mut PoolConnection<Sqlite>,
        db_model: Self::DbModel,
        cache: Self::DbModelCache,
    ) -> Result<Self, DatabaseError> {
        // `db_model` is owned here, so move it in directly — the original's
        // `db_model.clone()` was a needless allocation.
        Ok(Schedule {
            s: db_model,
            tags: cache,
        })
    }
}

View file

@ -1,49 +0,0 @@
use actix::MessageResponse;
use futures::executor::block_on;
use serde_derive::{Deserialize, Serialize};
use sqlx::pool::PoolConnection;
use sqlx::Sqlite;
use crate::db::{DbRelay, DbSchedule, DbTag};
use crate::errors::DatabaseError;
use crate::models::{convert_db_list, FromDbModel, Relay, Schedule};
/// API model for a tag together with everything it is attached to.
/// `MessageResponse` lets it be returned directly from an actix message handler.
#[derive(Serialize, Deserialize, Debug, Clone, MessageResponse)]
pub struct Tag {
    pub tag: String,
    // Relays carrying this tag.
    pub relays: Vec<Relay>,
    // Schedules carrying this tag.
    pub schedules: Vec<Schedule>,
}
/// Hydrates a [`Tag`] from its row; the cache carries the already-resolved
/// relay and schedule lists.
impl FromDbModel for Tag {
    type DbModel = DbTag;
    type DbModelCache = (Vec<Relay>, Vec<Schedule>);

    fn from_db_model(
        conn: &mut PoolConnection<Sqlite>,
        db_model: Self::DbModel,
    ) -> Result<Self, DatabaseError> {
        // Resolve everything attached to this tag, then reuse the cached path.
        let raw_schedules = block_on(DbSchedule::get_by_tag(conn, &db_model))?;
        let schedules: Vec<Schedule> = convert_db_list(conn, raw_schedules)?;
        let raw_relays = block_on(DbRelay::get_by_tag(conn, &db_model))?;
        let relays: Vec<Relay> = convert_db_list(conn, raw_relays)?;
        Self::from_db_model_cache(conn, db_model, (relays, schedules))
    }

    fn from_db_model_cache(
        _conn: &mut PoolConnection<Sqlite>,
        db_model: Self::DbModel,
        cache: Self::DbModelCache,
    ) -> Result<Self, DatabaseError> {
        let (relays, schedules) = cache;
        Ok(Tag {
            tag: db_model.tag.clone(),
            relays,
            schedules,
        })
    }
}

View file

@ -1,67 +0,0 @@
use serde_derive::Deserialize;
use crate::constants;
use crate::errors::EmgauwaError;
/// Network settings for the listener.
#[derive(Clone, Debug, Deserialize)]
#[serde(default)]
#[allow(unused)]
pub struct Server {
    pub host: String,
    pub port: u16,
}

/// Logging settings: level name and output target.
#[derive(Clone, Debug, Deserialize)]
#[serde(default)]
#[allow(unused)]
pub struct Logging {
    // Level name, e.g. "info" — accepted values depend on the logger; TODO confirm.
    pub level: String,
    // Output target; "stdout" by default — file-path handling done by the consumer.
    pub file: String,
}

/// Unix user/group the process should run as; empty strings by default.
#[derive(Clone, Debug, Deserialize, Default)]
#[serde(default)]
#[allow(unused)]
pub struct Permissions {
    pub user: String,
    pub group: String,
}
/// Defaults to listening on localhost with the crate-wide default port.
impl Default for Server {
    fn default() -> Self {
        Self {
            host: "127.0.0.1".to_owned(),
            port: constants::DEFAULT_PORT,
        }
    }
}

/// Defaults to info-level logging on standard output.
impl Default for Logging {
    fn default() -> Self {
        Self {
            level: "info".to_owned(),
            file: "stdout".to_owned(),
        }
    }
}
/// Loads a configuration of type `T` from three layered sources:
/// `/etc/emgauwa/<config_name>.*` and `./emgauwa-<config_name>.*` (both
/// optional), then environment variables prefixed `EMGAUWA_<env_prefix>__`
/// with `__` separating nested keys. Per the `config` crate's builder,
/// later-added sources take precedence.
pub fn load<T>(config_name: &str, env_prefix: &str) -> Result<T, EmgauwaError>
where
    for<'de> T: serde::Deserialize<'de>,
{
    // `required(false)` makes a missing file a no-op instead of an error.
    let etc_file =
        config::File::with_name(&format!("/etc/emgauwa/{}", config_name)).required(false);
    let local_file = config::File::with_name(&format!("./emgauwa-{}", config_name)).required(false);

    config::Config::builder()
        .add_source(etc_file)
        .add_source(local_file)
        .add_source(
            config::Environment::with_prefix(&format!("EMGAUWA_{}", env_prefix))
                .prefix_separator("__")
                .separator("__"),
        )
        .build()?
        .try_deserialize::<T>()
        .map_err(EmgauwaError::from)
}

View file

@ -1,103 +0,0 @@
use std::fmt::{Display, Formatter};
use std::str::FromStr;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use sqlx::database::HasArguments;
use sqlx::encode::IsNull;
use sqlx::error::BoxDynError;
use sqlx::sqlite::{SqliteTypeInfo, SqliteValueRef};
use sqlx::{Decode, Encode, Sqlite, Type};
use uuid::Uuid;
/// Entity identifier: a thin wrapper around a [`Uuid`] with serde and
/// sqlx (SQLite BLOB) integrations implemented below.
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct EmgauwaUid(Uuid);

impl Default for EmgauwaUid {
    /// Generates a fresh random (v4) uid.
    fn default() -> Self {
        Self(Uuid::new_v4())
    }
}
impl Display for EmgauwaUid {
    /// Formats as the hyphenated UUID form, identical to `String::from(self)`.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Write the hyphenated form directly instead of allocating an
        // intermediate String via `String::from(self)` first.
        write!(f, "{}", self.0.as_hyphenated())
    }
}
impl Serialize for EmgauwaUid {
    /// Serializes as the hyphenated UUID string.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        String::from(self).serialize(serializer)
    }
}

impl<'de> Deserialize<'de> for EmgauwaUid {
    /// Deserializes from any string accepted by [`Uuid::from_str`].
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Self::try_from(String::deserialize(deserializer)?.as_str())
            .map_err(|_| serde::de::Error::custom("invalid uid"))
    }
}
/// Canonical textual form: the hyphenated UUID representation.
impl From<&EmgauwaUid> for String {
    fn from(uid: &EmgauwaUid) -> String {
        uid.0.as_hyphenated().to_string()
    }
}
// Stored in SQLite as a BLOB containing the UUID's raw 16 bytes.
impl Type<Sqlite> for EmgauwaUid {
    fn type_info() -> SqliteTypeInfo {
        <&[u8] as Type<Sqlite>>::type_info()
    }

    fn compatible(ty: &SqliteTypeInfo) -> bool {
        <&[u8] as Type<Sqlite>>::compatible(ty)
    }
}

impl<'q> Encode<'q, Sqlite> for EmgauwaUid {
    //noinspection DuplicatedCode
    /// Encodes via the raw-byte conversion (`From<&EmgauwaUid> for Vec<u8>`).
    fn encode_by_ref(&self, buf: &mut <Sqlite as HasArguments<'q>>::ArgumentBuffer) -> IsNull {
        <Vec<u8> as Encode<Sqlite>>::encode(Vec::from(self), buf)
    }
}

impl<'r> Decode<'r, Sqlite> for EmgauwaUid {
    //noinspection DuplicatedCode
    /// Decodes from the raw byte slice stored in the database.
    fn decode(value: SqliteValueRef<'r>) -> Result<Self, BoxDynError> {
        Self::try_from(<&[u8] as Decode<Sqlite>>::decode(value)?).map_err(Into::into)
    }
}

/// Raw binary form: the UUID's 16 bytes.
impl From<&EmgauwaUid> for Vec<u8> {
    fn from(uid: &EmgauwaUid) -> Vec<u8> {
        uid.0.as_bytes().to_vec()
    }
}
impl TryFrom<&str> for EmgauwaUid {
    type Error = uuid::Error;

    /// Parses any textual UUID form accepted by [`Uuid::from_str`].
    fn try_from(value: &str) -> Result<Self, Self::Error> {
        let uuid = Uuid::from_str(value)?;
        Ok(Self(uuid))
    }
}

impl TryFrom<&[u8]> for EmgauwaUid {
    type Error = uuid::Error;

    /// Builds the uid from exactly 16 raw bytes; any other length is an error.
    fn try_from(value: &[u8]) -> Result<EmgauwaUid, uuid::Error> {
        Ok(Self(Uuid::from_slice(value)?))
    }
}

impl From<Vec<u8>> for EmgauwaUid {
    /// Infallible variant for trusted database values: panics on malformed
    /// input instead of returning an error.
    fn from(value: Vec<u8>) -> Self {
        Self::try_from(value.as_slice()).expect("Failed to parse uid from database")
    }
}

View file

@ -1,29 +0,0 @@
mod emgauwa_uid;
mod request;
mod schedule_uid;
use actix::Message;
pub use emgauwa_uid::EmgauwaUid;
pub use request::*;
pub use schedule_uid::ScheduleUid;
use serde_derive::{Deserialize, Serialize};
use crate::db::DbSchedule;
use crate::errors::EmgauwaError;
use crate::models::{Controller, Relay};
// Weekday index used in schedule junctions — encoding defined by the schema; TODO confirm.
pub type Weekday = i64;
// Per-relay on/off states; `None` when a state is unknown — presumably ordered
// by relay number (verify against the controller side).
pub type RelayStates = Vec<Option<bool>>;
/// Messages exchanged over the controller WebSocket; also an actix [`Message`]
/// whose handlers return `Result<(), EmgauwaError>`.
#[derive(Debug, Serialize, Deserialize, Message)]
#[rtype(result = "Result<(), EmgauwaError>")]
pub enum ControllerWsAction {
    /// Controller announces itself — presumably the initial handshake.
    Register(Controller),
    /// Orderly disconnect notification.
    Disconnect,
    /// Push a set of schedules.
    Schedules(Vec<DbSchedule>),
    /// Push a set of relays.
    Relays(Vec<Relay>),
    /// Push an updated controller record.
    Controller(Controller),
    /// Relay states for the controller with the given uid.
    RelayStates((EmgauwaUid, RelayStates)),
    /// Pulse a relay (by number) for an optional duration — units not visible
    /// here; TODO confirm (seconds?).
    RelayPulse((i64, Option<u32>)),
}

View file

@ -1,95 +0,0 @@
use serde_derive::{Deserialize, Serialize};
use sqlx::pool::PoolConnection;
use sqlx::Sqlite;
use crate::db::{DbPeriods, DbSchedule};
use crate::errors::DatabaseError;
use crate::types::{EmgauwaUid, ScheduleUid};
/// Body for creating a schedule.
#[derive(Debug, Serialize, Deserialize)]
pub struct RequestScheduleCreate {
    pub name: String,
    pub periods: DbPeriods,
    pub tags: Option<Vec<String>>,
}

/// Body for partially updating a schedule — `None` fields presumably left
/// unchanged by the handler; verify there.
#[derive(Debug, Serialize, Deserialize)]
pub struct RequestScheduleUpdate {
    pub name: Option<String>,
    pub periods: Option<DbPeriods>,
    pub tags: Option<Vec<String>>,
}

/// Body for partially updating a relay.
#[derive(Debug, Serialize, Deserialize)]
pub struct RequestRelayUpdate {
    pub name: Option<String>,
    pub active_schedule: Option<RequestScheduleId>,
    pub schedules: Option<Vec<RequestScheduleId>>,
    pub tags: Option<Vec<String>>,
}

/// Body for pulsing a relay — duration units defined by the handler; TODO confirm.
#[derive(Debug, Serialize, Deserialize)]
pub struct RequestRelayPulse {
    pub duration: Option<u32>,
}

/// A schedule reference by uid, embedded in other request bodies.
#[derive(Debug, Serialize, Deserialize)]
pub struct RequestScheduleId {
    pub id: ScheduleUid,
}

/// Body for renaming a controller.
#[derive(Debug, Serialize, Deserialize)]
pub struct RequestControllerUpdate {
    pub name: String,
}

/// Body for creating a tag.
#[derive(Debug, Serialize, Deserialize)]
pub struct RequestTagCreate {
    pub tag: String,
}

/// Relay reference inside a macro action: relay number on a given controller.
#[derive(Debug, Serialize, Deserialize)]
pub struct RequestMacroActionRelay {
    pub number: i64,
    pub controller_id: EmgauwaUid,
}

/// Schedule reference inside a macro action.
#[derive(Debug, Serialize, Deserialize)]
pub struct RequestMacroActionSchedule {
    pub id: ScheduleUid,
}

/// One macro step: apply `schedule` to `relay` on `weekday`.
#[derive(Debug, Serialize, Deserialize)]
pub struct RequestMacroAction {
    pub weekday: i64,
    pub relay: RequestMacroActionRelay,
    pub schedule: RequestMacroActionSchedule,
}

/// Body for creating a macro.
#[derive(Debug, Serialize, Deserialize)]
pub struct RequestMacroCreate {
    pub name: String,
    pub actions: Vec<RequestMacroAction>,
}

/// Body for partially updating a macro.
#[derive(Debug, Serialize, Deserialize)]
pub struct RequestMacroUpdate {
    pub name: Option<String>,
    pub actions: Option<Vec<RequestMacroAction>>,
}

/// Parameters for executing a macro, optionally restricted to one weekday.
#[derive(Debug, Deserialize)]
pub struct RequestMacroExecute {
    pub weekday: Option<i64>,
}
impl RequestScheduleId {
    /// Looks up the referenced schedule, mapping a missing row to
    /// [`DatabaseError::NotFound`].
    pub async fn get_schedule(
        &self,
        conn: &mut PoolConnection<Sqlite>,
    ) -> Result<DbSchedule, DatabaseError> {
        match DbSchedule::get_by_uid(conn, &self.id).await? {
            Some(schedule) => Ok(schedule),
            None => Err(DatabaseError::NotFound),
        }
    }
}

View file

@ -1,171 +0,0 @@
use std::convert::TryFrom;
use std::fmt::{Debug, Formatter};
use std::str::FromStr;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use sqlx::database::HasArguments;
use sqlx::encode::IsNull;
use sqlx::error::BoxDynError;
use sqlx::sqlite::{SqliteTypeInfo, SqliteValueRef};
use sqlx::{Decode, Encode, Sqlite, Type};
use uuid::Uuid;
/// Schedule identifier with two reserved sentinels — the built-in always-off
/// and always-on schedules — alongside regular UUID-backed schedules.
#[derive(Clone)]
pub enum ScheduleUid {
    Off,
    On,
    Any(Uuid),
}

impl ScheduleUid {
    // Canonical encodings of the two sentinels: "off"/"on" as text,
    // UUID integer value 0/1, and a single 0/1 byte in the database BLOB.
    const OFF_STR: &'static str = "off";
    const OFF_U128: u128 = 0;
    const OFF_U8: u8 = 0;
    const ON_STR: &'static str = "on";
    const ON_U128: u128 = 1;
    const ON_U8: u8 = 1;
}

impl Default for ScheduleUid {
    /// Generates a fresh random (v4) regular schedule uid.
    fn default() -> Self {
        Self::Any(Uuid::new_v4())
    }
}
impl Debug for ScheduleUid {
    /// Shows `off`, `on`, or the UUID's debug form.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Off => Self::OFF_STR.fmt(f),
            Self::On => Self::ON_STR.fmt(f),
            Self::Any(value) => value.fmt(f),
        }
    }
}

impl PartialEq for ScheduleUid {
    /// Same variant required; `Any` additionally compares the wrapped UUIDs.
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (Self::Off, Self::Off) => true,
            (Self::On, Self::On) => true,
            (Self::Any(my_uuid), Self::Any(other_uuid)) => my_uuid == other_uuid,
            _ => false,
        }
    }
}
// Stored in SQLite as a BLOB: one sentinel byte for Off/On, the 16 raw UUID
// bytes otherwise (see the `Vec<u8>` conversions below).
impl Type<Sqlite> for ScheduleUid {
    fn type_info() -> SqliteTypeInfo {
        <&[u8] as Type<Sqlite>>::type_info()
    }

    fn compatible(ty: &SqliteTypeInfo) -> bool {
        <&[u8] as Type<Sqlite>>::compatible(ty)
    }
}

impl<'q> Encode<'q, Sqlite> for ScheduleUid {
    //noinspection DuplicatedCode
    /// Encodes via the `Vec<u8>` conversion.
    fn encode_by_ref(&self, buf: &mut <Sqlite as HasArguments<'q>>::ArgumentBuffer) -> IsNull {
        <Vec<u8> as Encode<Sqlite>>::encode(Vec::from(self), buf)
    }
}

impl<'r> Decode<'r, Sqlite> for ScheduleUid {
    //noinspection DuplicatedCode
    /// Decodes from the stored byte slice.
    fn decode(value: SqliteValueRef<'r>) -> Result<Self, BoxDynError> {
        Self::try_from(<&[u8] as Decode<Sqlite>>::decode(value)?).map_err(Into::into)
    }
}

impl Serialize for ScheduleUid {
    /// Serializes as `"off"`, `"on"`, or the hyphenated UUID string.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        String::from(self).serialize(serializer)
    }
}

impl<'de> Deserialize<'de> for ScheduleUid {
    /// Accepts `"off"`, `"on"`, or any UUID string.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Self::try_from(String::deserialize(deserializer)?.as_str())
            .map_err(|_| serde::de::Error::custom("invalid schedule uid"))
    }
}
/// Maps the reserved UUID integer values 0 and 1 back to the Off/On sentinels;
/// every other UUID becomes a regular schedule uid.
impl From<Uuid> for ScheduleUid {
    fn from(uid: Uuid) -> Self {
        let raw = uid.as_u128();
        if raw == Self::OFF_U128 {
            Self::Off
        } else if raw == Self::ON_U128 {
            Self::On
        } else {
            Self::Any(uid)
        }
    }
}
impl TryFrom<&str> for ScheduleUid {
    type Error = uuid::Error;

    /// Parses `"off"`, `"on"`, or any UUID string accepted by [`Uuid::from_str`].
    fn try_from(value: &str) -> Result<Self, Self::Error> {
        match value {
            Self::OFF_STR => Ok(Self::Off),
            Self::ON_STR => Ok(Self::On),
            // The original re-matched the parse result only to rebuild it;
            // `map` expresses the same thing directly.
            any => Uuid::from_str(any).map(Self::Any),
        }
    }
}
/// Reverse of `From<Uuid>`: sentinels map to the reserved UUID values 0 and 1.
impl From<&ScheduleUid> for Uuid {
    fn from(uid: &ScheduleUid) -> Uuid {
        match uid {
            ScheduleUid::Off => Uuid::from_u128(ScheduleUid::OFF_U128),
            ScheduleUid::On => Uuid::from_u128(ScheduleUid::ON_U128),
            ScheduleUid::Any(value) => *value,
        }
    }
}

/// Textual form: `"off"`, `"on"`, or the hyphenated UUID.
impl From<&ScheduleUid> for String {
    fn from(uid: &ScheduleUid) -> String {
        match uid {
            ScheduleUid::Off => String::from(ScheduleUid::OFF_STR),
            ScheduleUid::On => String::from(ScheduleUid::ON_STR),
            ScheduleUid::Any(value) => value.as_hyphenated().to_string(),
        }
    }
}

/// Database form: one sentinel byte for Off/On, the 16 raw UUID bytes otherwise.
impl From<&ScheduleUid> for Vec<u8> {
    fn from(uid: &ScheduleUid) -> Vec<u8> {
        match uid {
            ScheduleUid::Off => vec![ScheduleUid::OFF_U8],
            ScheduleUid::On => vec![ScheduleUid::ON_U8],
            ScheduleUid::Any(value) => value.as_bytes().to_vec(),
        }
    }
}
impl TryFrom<&[u8]> for ScheduleUid {
    type Error = uuid::Error;

    /// Inverse of the `Vec<u8>` encoding: a single 0/1 byte is a sentinel,
    /// anything else must be exactly 16 UUID bytes.
    fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
        let result = match value {
            [Self::OFF_U8] => Self::Off,
            [Self::ON_U8] => Self::On,
            value_bytes => Self::Any(Uuid::from_slice(value_bytes)?),
        };
        Ok(result)
    }
}

impl From<Vec<u8>> for ScheduleUid {
    /// Infallible variant for trusted database values; panics on malformed input.
    fn from(value: Vec<u8>) -> Self {
        Self::try_from(value.as_slice()).expect("Failed to parse schedule uid from database")
    }
}

Some files were not shown because too many files have changed in this diff Show more