JMARyA 2024-10-07 20:53:58 +02:00
parent 584ffb6b11
commit 1faa3b9668
Signed by: jmarya
GPG key ID: 901B2ADDF27C2263
19 changed files with 1058 additions and 1244 deletions

Cargo.lock generated (1149 changed lines)

File diff suppressed because it is too large.

View file

@ -4,11 +4,9 @@ version = "0.1.0"
edition = "2021" edition = "2021"
[dependencies] [dependencies]
chrono = "0.4.38"
futures = "0.3.30" futures = "0.3.30"
log = "0.4.20" log = "0.4.20"
mdq = { git = "https://git.hydrar.de/mdtools/mdq" } mdq = { git = "https://git.hydrar.de/mdtools/mdq" }
mongodb = "2.8.0"
rocket = { version = "0.5.1", features = ["json"] } rocket = { version = "0.5.1", features = ["json"] }
rocket_cors = "0.6.0" rocket_cors = "0.6.0"
serde = { version = "1.0.195", features = ["derive"] } serde = { version = "1.0.195", features = ["derive"] }
@ -16,8 +14,9 @@ serde_json = "1.0.111"
serde_yaml = "0.9.34" serde_yaml = "0.9.34"
tokio = { version = "1.35.1", features = ["full"] } tokio = { version = "1.35.1", features = ["full"] }
toml = "0.8.8" toml = "0.8.8"
uuid = { version = "1.8.0", features = ["v4"] }
mongod = { git = "https://git.hydrar.de/jmarya/mongod" }
env_logger = "0.11.5" env_logger = "0.11.5"
walkdir = "2.5.0" walkdir = "2.5.0"
reqwest = { version = "0.11", features = ["json"] } reqwest = { version = "0.11", features = ["json"] }
chrono = { version = "0.4.38", features = ["serde"] }
uuid = { version = "1.8.0", features = ["v4", "serde"] }
sqlx = { version = "0.8", features = ["postgres", "runtime-tokio-native-tls", "derive", "uuid", "chrono", "json"] }
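The dependency swap above carries the rest of the diff: mongodb and mongod are dropped, sqlx arrives with the postgres, uuid, chrono, derive and json features, and uuid/chrono gain serde support. A minimal sketch of what that feature set enables, assuming a pool connected to the cdb database (TransactionRow and latest_transactions are illustrative names, not part of this commit):

use chrono::{DateTime, Utc};
use sqlx::{FromRow, PgPool};
use uuid::Uuid;

// Hypothetical row type: the "uuid" and "chrono" sqlx features let UUID and
// timestamptz columns decode straight into these field types.
#[derive(Debug, FromRow)]
struct TransactionRow {
    id: Uuid,
    item: String,
    created: DateTime<Utc>,
}

// Illustrative query; `pool` would be the shared PgPool set up elsewhere.
async fn latest_transactions(pool: &PgPool) -> sqlx::Result<Vec<TransactionRow>> {
    sqlx::query_as("SELECT id, item, created FROM transactions ORDER BY created DESC LIMIT 10")
        .fetch_all(pool)
        .await
}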

View file

@ -5,25 +5,26 @@ services:
ports: ports:
- "8080:8080" - "8080:8080"
depends_on: depends_on:
- mongodb - postgres
volumes: volumes:
- ./itemdb/items:/itemdb - ./itemdb/items:/itemdb
- ./locations:/locations - ./locations:/locations
- ./flows:/flows - ./flows:/flows
- ./config.toml:/config.toml - ./config.toml:/config.toml
environment: environment:
- "DB_URI=mongodb://user:pass@mongodb:27017" - "DATABASE_URL=postgres://user:pass@postgres/cdb"
- "DB=cdb"
- "RUST_LOG=debug" - "RUST_LOG=debug"
- "ROCKET_ADDRESS=0.0.0.0" - "ROCKET_ADDRESS=0.0.0.0"
- "ROCKET_PORT=8080" - "ROCKET_PORT=8080"
mongodb: postgres:
image: mongo:latest image: timescale/timescaledb:latest-pg16
restart: always
ports: ports:
- "27017:27017" - 5432:5432
environment:
MONGO_INITDB_ROOT_USERNAME: user
MONGO_INITDB_ROOT_PASSWORD: pass
volumes: volumes:
- ./db:/data/db - ./db:/var/lib/postgresql/data/
environment:
- POSTGRES_USER=user
- POSTGRES_PASSWORD=pass
- POSTGRES_DB=cdb
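Note that depends_on only orders container startup; it does not wait for Postgres to accept connections. A small retry sketch on the Rust side (connect_with_retry is a hypothetical helper, not in this commit), assuming the tokio and sqlx versions declared above:

use std::time::Duration;
use sqlx::postgres::PgPoolOptions;

// Keep retrying until the postgres container is actually ready to serve.
async fn connect_with_retry(url: &str) -> sqlx::PgPool {
    loop {
        match PgPoolOptions::new().max_connections(5).connect(url).await {
            Ok(pool) => return pool,
            Err(e) => {
                eprintln!("postgres not ready yet ({e}), retrying in 2s");
                tokio::time::sleep(Duration::from_secs(2)).await;
            }
        }
    }
}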

migrations/0000_init.sql (new file, 32 lines)
View file

@ -0,0 +1,32 @@
CREATE TABLE flows (
id UUID NOT NULL PRIMARY KEY DEFAULT gen_random_uuid(),
"started" timestamptz NOT NULL DEFAULT current_timestamp,
kind TEXT NOT NULL,
input UUID[],
ended timestamptz,
"next" UUID,
produced UUID[],
FOREIGN KEY ("next") REFERENCES flows(id)
);
CREATE TABLE flow_notes (
id UUID NOT NULL PRIMARY KEY DEFAULT gen_random_uuid(),
time timestamptz NOT NULL DEFAULT current_timestamp,
content TEXT NOT NULL,
on_flow UUID NOT NULL,
FOREIGN KEY (on_flow) REFERENCES flows(id)
);
CREATE TABLE transactions (
id UUID NOT NULL PRIMARY KEY DEFAULT gen_random_uuid(),
created timestamptz NOT NULL DEFAULT current_timestamp,
item TEXT NOT NULL,
variant TEXT NOT NULL,
price DOUBLE PRECISION NOT NULL, -- f64 on the Rust side
origin TEXT,
"location" TEXT,
note TEXT,
destination TEXT,
consumed_price DOUBLE PRECISION,
consumed_timestamp timestamptz
);
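The commit does not show where this migration is applied. One plausible approach, assuming sqlx's default migrate and macros features stay enabled, is to embed the migrations/ directory and run it at startup (init_db is an illustrative name, not part of the commit):

use sqlx::postgres::PgPoolOptions;

// Connect, then apply any pending migrations embedded at compile time.
async fn init_db(url: &str) -> sqlx::PgPool {
    let pool = PgPoolOptions::new()
        .max_connections(5)
        .connect(url)
        .await
        .expect("failed to connect to Postgres");
    sqlx::migrate!("./migrations")
        .run(&pool)
        .await
        .expect("failed to run migrations");
    pool
}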

View file

@ -1,10 +1,9 @@
use std::collections::HashMap; use std::collections::HashMap;
use mongod::Model;
use crate::item::Item; use crate::item::Item;
/// Item database /// Item database
#[derive(Debug)]
pub struct ItemDB { pub struct ItemDB {
index: HashMap<String, Item>, index: HashMap<String, Item>,
} }
@ -20,9 +19,7 @@ impl ItemDB {
for item in &index.documents { for item in &index.documents {
let item = Item::new(item); let item = Item::new(item);
item.insert_overwrite().await.unwrap(); items.insert(item.id.clone(), item);
log::info!("Adding item {} to DB", item.name);
items.insert(item._id.clone(), item);
} }
Self { index: items } Self { index: items }
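The route handlers later call get_itemdb!().get_item(...), which this hunk does not show. Assuming it is a plain lookup into the HashMap index built here, it would look roughly like this (the exact signature and return type may differ in the real code):

impl ItemDB {
    // Presumed accessor used by the route handlers; a simple map lookup.
    pub fn get_item(&self, id: &str) -> Option<&Item> {
        self.index.get(id)
    }
}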

View file

@ -1,11 +1,16 @@
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::collections::HashMap; use std::collections::HashMap;
use mongod::{assert_reference_of, reference_of, Reference, Validate}; use crate::routes::item::{item_does_not_exist_error, variant_does_not_exist_error, SupplyForm};
use crate::routes::{ApiError, ToAPI};
use crate::{get_itemdb, get_pg};
use sqlx::FromRow;
#[derive(Debug, Clone, Serialize, Deserialize, Model, Referencable)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FlowInfo { pub struct FlowInfo {
#[serde(default)] #[serde(default)]
pub _id: String, pub id: String,
pub name: String, pub name: String,
#[serde(default)] #[serde(default)]
pub depends: Vec<String>, pub depends: Vec<String>,
@ -13,23 +18,10 @@ pub struct FlowInfo {
pub produces: Option<Vec<String>>, pub produces: Option<Vec<String>>,
} }
impl Validate for FlowInfo {
async fn validate(&self) -> Result<(), String> {
Ok(())
}
}
impl FlowInfo {
pub async fn add(&mut self, id: &str) {
self._id = id.to_string();
self.insert_overwrite().await.unwrap();
}
}
impl ToAPI for FlowInfo { impl ToAPI for FlowInfo {
async fn api(&self) -> serde_json::Value { async fn api(&self) -> serde_json::Value {
json!({ json!({
"id": self._id, "id": self.id,
"name": self.name, "name": self.name,
"depends": self.depends, "depends": self.depends,
"next": self.next, "next": self.next,
@ -37,119 +29,68 @@ impl ToAPI for FlowInfo {
}) })
} }
} }
use mongod::{
derive::{Model, Referencable},
Model, Referencable, ToAPI,
};
use mongodb::bson::doc;
use serde::{Deserialize, Serialize};
use serde_json::json;
use crate::item::Item;
use crate::routes::item::{item_does_not_exist_error, variant_does_not_exist_error, SupplyForm};
use crate::routes::{api_error, ApiError};
use crate::transaction::{Price, Transaction};
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct DoneInfo {
/// Timestamp when the flow was ended
pub ended: i64,
/// The flow succeeding this one
pub next: Option<Reference>,
/// Transactions this flow produced
pub produced: Option<Vec<Reference>>,
}
impl DoneInfo {
pub fn new(next: Option<Reference>) -> Self {
Self {
ended: chrono::Utc::now().timestamp(),
next,
produced: None,
}
}
pub fn api(&self) -> serde_json::Value {
json!({
"ended": self.ended,
"next": self.next.as_ref().map(|x| x.id()),
"produced": self.produced.as_ref().map(|x| x.iter().map(|t| t.id()).collect::<Vec<_>>()),
})
}
}
/// A production flow /// A production flow
#[derive(Debug, Clone, Serialize, Deserialize, Model, Referencable)] #[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct Flow { pub struct Flow {
/// ID /// ID
pub _id: String, pub id: uuid::Uuid,
/// Timestamp when the flow was started /// Timestamp when the flow was started
pub started: i64, pub started: chrono::DateTime<chrono::Utc>,
/// Kind of flow; ID of the describing JSON /// Kind of flow; ID of the describing JSON
pub kind: Reference, pub kind: String,
/// Input transactions /// Input transactions
pub input: Option<Vec<Reference>>, pub input: Option<Vec<uuid::Uuid>>,
/// Information when a flow is done /// Timestamp when the flow was ended
pub done: Option<DoneInfo>, pub ended: Option<chrono::DateTime<chrono::Utc>>,
} /// The flow succedding this one
pub next: Option<uuid::Uuid>,
impl Validate for Flow { /// Transactions this flow produced
async fn validate(&self) -> Result<(), String> { pub produced: Option<Vec<uuid::Uuid>>,
assert_reference_of!(self.kind, FlowInfo);
if let Some(input) = &self.input {
for t in input {
assert_reference_of!(t, Transaction);
}
}
if let Some(done) = &self.done {
if let Some(next) = &done.next {
assert_reference_of!(next, Flow);
}
if let Some(produced) = &done.produced {
for prod in produced {
assert_reference_of!(prod, Transaction);
}
}
}
Ok(())
}
} }
impl ToAPI for Flow { impl ToAPI for Flow {
async fn api(&self) -> serde_json::Value { async fn api(&self) -> serde_json::Value {
let done = if self.ended.is_some() {
Some(json!({
"ended": self.ended.map(|x| x.timestamp()),
"next": self.next,
"produced": self.produced
}))
} else {
None
};
json!({ json!({
"id": self._id, "id": self.id,
"started": self.started, "started": self.started.timestamp(),
"kind": self.kind.id(), "kind": self.kind,
"input": self.input.as_ref().map(|x| x.iter().map(|t| t.id()).collect::<Vec<_>>()), "input": self.input,
"done": self.done.as_ref().map(|x| x.api()) "done": done
}) })
} }
} }
impl Flow { impl Flow {
pub async fn create(kind: &str, input: Option<Vec<Reference>>) -> Self { pub async fn get(id: &uuid::Uuid) -> Option<Self> {
let f = Self { sqlx::query_as("SELECT * FROM flows WHERE id = $1")
_id: uuid::Uuid::new_v4().to_string(), .bind(id)
started: chrono::Utc::now().timestamp(), .fetch_optional(get_pg!())
kind: reference_of!(FlowInfo, kind).unwrap(), .await
input: input, .unwrap()
done: None, }
};
f.insert().await.unwrap(); pub async fn create(kind: &str, input: Option<Vec<uuid::Uuid>>) -> Self {
sqlx::query_as("INSERT INTO flows (kind, input) VALUES ($1, $2) RETURNING *")
f .bind(kind)
.bind(input)
.fetch_one(get_pg!())
.await
.unwrap()
} }
pub async fn end(self) -> Self { pub async fn end(self) -> Self {
self.change() sqlx::query_as("UPDATE flows SET ended = current_timestamp WHERE id = $1 RETURNING *")
.done(Some(DoneInfo::new(None))) .bind(self.id)
.update() .fetch_one(get_pg!())
.await .await
.unwrap() .unwrap()
} }
@ -157,14 +98,14 @@ impl Flow {
pub async fn end_with_produce( pub async fn end_with_produce(
self, self,
produced: &[SupplyForm], produced: &[SupplyForm],
) -> Result<HashMap<String, Vec<String>>, ApiError> { ) -> Result<HashMap<String, Vec<uuid::Uuid>>, ApiError> {
let mut ret = HashMap::new(); let mut ret = HashMap::new();
let mut t_create = Vec::new(); let mut t_create = Vec::new();
let mut produced_ref = Vec::with_capacity(ret.len()); let mut produced_ref = Vec::with_capacity(ret.len());
for prod in produced { for prod in produced {
let t = Item::get(&prod.item) let t = get_itemdb!()
.await .get_item(&prod.item)
.ok_or_else(item_does_not_exist_error)? .ok_or_else(item_does_not_exist_error)?
.variant(&prod.variant) .variant(&prod.variant)
.ok_or_else(variant_does_not_exist_error)?; .ok_or_else(variant_does_not_exist_error)?;
@ -175,83 +116,73 @@ impl Flow {
for (item, info) in t_create { for (item, info) in t_create {
let t = item let t = item
.supply( .supply(
Price::zero(), 0.0,
Some(&format!("flow::{}::{}", self.kind.id(), self._id)), Some(&format!("flow::{}::{}", self.kind, self.id)),
info.location.as_ref().map(|x| x.as_str()), info.location.as_ref().map(|x| x.as_str()),
info.note.as_ref().map(|x| x.as_str()), info.note.as_ref().map(|x| x.as_str()),
) )
.await; .await;
ret.entry(item.item_variant_id().clone()) ret.entry(item.item_variant_id().clone())
.or_insert(Vec::new()) .or_insert(Vec::new())
.push(t._id.clone()); .push(t.id.clone());
produced_ref.push(t.reference()); produced_ref.push(t.id);
} }
self.change() sqlx::query("UPDATE flows SET ended = current_timestamp, produced = $1 WHERE id = $2")
.done(Some(DoneInfo { .bind(produced_ref)
ended: chrono::Utc::now().timestamp(), .bind(self.id)
next: None, .execute(get_pg!()).await.unwrap();
produced: Some(produced_ref),
}))
.update()
.await
.unwrap();
Ok(ret) Ok(ret)
} }
pub async fn continue_next(self, next_flow: &Flow) -> Self { pub async fn continue_next(self, next_flow: &Flow) -> Self {
self.change() sqlx::query_as("UPDATE flows SET ended = current_timestamp, \"next\" = $1 WHERE id = $2 RETURNING *")
.done(Some(DoneInfo::new(Some(next_flow.reference())))) .bind(next_flow.id)
.update() .bind(&self.id)
.await .fetch_one(get_pg!()).await.unwrap()
.unwrap()
} }
} }
/// A note for a Flow /// A note for a Flow
#[derive(Debug, Clone, Serialize, Deserialize, Model, Referencable)] #[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct FlowNote { pub struct FlowNote {
/// ID /// ID
pub _id: String, pub id: uuid::Uuid,
/// Tiemstamp when the note was created /// Timestamp when the note was created
pub timestamp: i64, pub time: chrono::DateTime<chrono::Utc>,
/// Note Content /// Note Content
pub content: String, pub content: String,
/// Associated flow /// Associated flow
pub on_flow: Reference, pub on_flow: uuid::Uuid,
} }
impl FlowNote { impl FlowNote {
pub async fn create(content: &str, flow: Reference) -> Self { pub async fn create(content: &str, flow: &uuid::Uuid) -> Self {
let s = Self { sqlx::query_as("INSERT INTO flow_notes (content, on_flow) VALUES ($1, $2) RETURNING *")
_id: uuid::Uuid::new_v4().to_string(), .bind(content)
timestamp: chrono::Utc::now().timestamp(), .bind(flow)
content: content.to_string(), .fetch_one(get_pg!())
on_flow: flow, .await
}; .unwrap()
}
s.insert().await.unwrap(); pub async fn find_of(id: &str) -> Vec<Self> {
sqlx::query_as("SELECT * FROM flow_notes WHERE on_flow = $1::uuid ORDER BY time DESC")
s .bind(id)
.fetch_all(get_pg!())
.await
.unwrap()
} }
} }
impl ToAPI for FlowNote { impl ToAPI for FlowNote {
async fn api(&self) -> serde_json::Value { async fn api(&self) -> serde_json::Value {
json!({ json!({
"uuid": self._id, "uuid": self.id,
"timestamp": self.timestamp, "timestamp": self.time.timestamp(),
"content": self.content, "content": self.content,
"on_flow": self.on_flow.id() "on_flow": self.on_flow
}) })
} }
} }
impl Validate for FlowNote {
async fn validate(&self) -> Result<(), String> {
assert_reference_of!(self.on_flow, Flow);
Ok(())
}
}
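Taken together, the Flow API now round-trips entirely through Postgres: create inserts a row, end and continue_next update it, and notes hang off it by UUID. An illustrative lifecycle, assuming the migration above has been applied ("bake" is a made-up flow kind, not part of the commit):

use crate::flow::{Flow, FlowNote};

// Start a flow, attach a note, then end it.
async fn example_flow() {
    let flow = Flow::create("bake", None).await;

    let note = FlowNote::create("oven preheated", &flow.id).await;
    println!("note {} on flow {}", note.id, flow.id);

    let ended = flow.end().await;
    assert!(ended.ended.is_some());
}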

View file

@ -1,13 +1,9 @@
use std::collections::HashMap; use std::collections::HashMap;
use mongod::{
derive::{Model, Referencable},
Model, Validate,
};
use mongodb::bson::doc;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::json; use serde_json::json;
use crate::get_pg;
use crate::transaction::Transaction; use crate::transaction::Transaction;
use crate::variant::Variant; use crate::variant::Variant;
@ -24,10 +20,10 @@ use crate::variant::Variant;
/// This struct serves as a blueprint for describing individual items within /// This struct serves as a blueprint for describing individual items within
/// a larger inventory or database of physical goods. It includes fields for /// a larger inventory or database of physical goods. It includes fields for
/// the name of the item and its category. /// the name of the item and its category.
#[derive(Debug, Clone, Serialize, Deserialize, Model, Referencable)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Item { pub struct Item {
/// The ID /// The ID
pub _id: String, pub id: String,
/// The name of the Item /// The name of the Item
pub name: String, pub name: String,
/// Category of the Item /// Category of the Item
@ -37,12 +33,6 @@ pub struct Item {
pub variants: HashMap<String, Variant>, pub variants: HashMap<String, Variant>,
} }
impl Validate for Item {
async fn validate(&self) -> Result<(), String> {
Ok(())
}
}
impl Item { impl Item {
/// Creates a new `Item` from a parsed markdown document /// Creates a new `Item` from a parsed markdown document
pub fn new(doc: &mdq::Document) -> Self { pub fn new(doc: &mdq::Document) -> Self {
@ -93,7 +83,7 @@ impl Item {
} }
Self { Self {
_id: id, id,
name, name,
category, category,
variants, variants,
@ -113,33 +103,23 @@ impl Item {
} }
pub async fn inventory(&self) -> Vec<Transaction> { pub async fn inventory(&self) -> Vec<Transaction> {
let filter = doc! { sqlx::query_as("SELECT * FROM transactions WHERE item = $1 AND consumed_timestamp IS NULL ORDER BY created DESC")
"item": &self._id, .bind(&self.id)
"consumed": { "$not": { "$type": "object" } } .fetch_all(get_pg!()).await.unwrap()
};
Transaction::find(filter, None, None).await.unwrap()
} }
pub async fn inventory_by_origin(&self, origin: &str) -> Vec<Transaction> { pub async fn inventory_by_origin(&self, origin: &str) -> Vec<Transaction> {
let filter = doc! { sqlx::query_as("SELECT * FROM transactions WHERE item = $1 AND consumed_timestamp IS NULL AND origin = $2 ORDER BY created DESC")
"item": &self._id, .bind(&self.id)
"consumed": { "$not": { "$type": "object" } }, .bind(origin)
"origin": origin .fetch_all(get_pg!()).await.unwrap()
};
Transaction::find(filter, None, None).await.unwrap()
} }
pub async fn consumed_by_destination(&self, destination: &str) -> Vec<Transaction> { pub async fn consumed_by_destination(&self, destination: &str) -> Vec<Transaction> {
let filter = doc! { sqlx::query_as("SELECT * FROM transactions WHERE item = $1 AND consumed_timestamp IS NOT NULL AND destination = $2 ORDER BY created DESC")
"item": &self._id, .bind(&self.id)
"consumed": { .bind(destination)
"destination": destination .fetch_all(get_pg!()).await.unwrap()
}
};
Transaction::find(filter, None, None).await.unwrap()
} }
pub fn api_json(&self) -> serde_json::Value { pub fn api_json(&self) -> serde_json::Value {
@ -150,7 +130,7 @@ impl Item {
.collect(); .collect();
json!({ json!({
"uuid": self._id, "uuid": self.id,
"name": self.name, "name": self.name,
"category": self.category, "category": self.category,
"variants": variants "variants": variants

View file

@ -5,6 +5,7 @@ use std::{
use serde::Deserialize; use serde::Deserialize;
#[derive(Debug)]
pub struct JSONStore<T> { pub struct JSONStore<T> {
documents: HashMap<String, T>, documents: HashMap<String, T>,
} }

View file

@ -1,18 +1,14 @@
use futures::FutureExt;
use mongod::{
derive::{Model, Referencable},
Model, ToAPI, Validate,
};
use mongodb::bson::doc;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::json; use serde_json::json;
use crate::{get_locations, routes::ToAPI};
/// A Storage Location /// A Storage Location
#[derive(Debug, Clone, Serialize, Deserialize, Model, Referencable)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Location { pub struct Location {
/// UUID /// UUID
#[serde(default)] #[serde(default)]
pub _id: String, pub id: String,
/// Name /// Name
pub name: String, pub name: String,
/// Parent /// Parent
@ -27,23 +23,18 @@ pub struct StorageConditions {
pub temperature: i64, pub temperature: i64,
} }
impl Validate for Location {
async fn validate(&self) -> Result<(), String> {
Ok(())
}
}
impl Location { impl Location {
/// Recursively get the conditions of a location. This inherits from parent locations. /// Recursively get the conditions of a location. This inherits from parent locations.
pub fn conditions_rec(&self) -> futures::future::BoxFuture<'_, Option<StorageConditions>> { pub fn conditions_rec(&self) -> Option<StorageConditions> {
async move { let locations = get_locations!();
if let Some(cond) = &self.conditions { if let Some(cond) = &self.conditions {
return Some(cond.clone()); return Some(cond.clone());
} }
if let Some(parent) = &self.parent { if let Some(parent) = &self.parent {
if let Some(parent_loc) = Location::get(parent).await { if let Some(parent_loc) = locations.get(parent) {
if let Some(cond) = parent_loc.conditions_rec().await { if let Some(cond) = parent_loc.conditions_rec() {
return Some(cond); return Some(cond);
} }
} }
@ -51,49 +42,45 @@ impl Location {
None None
} }
.boxed()
}
// Get direct children // Get direct children
pub async fn children_direct(&self) -> Vec<Location> { pub fn children_direct(&self) -> Vec<Location> {
Location::find(doc! { "parent": self._id.clone()}, None, None) let mut ret = Vec::new();
.await
.unwrap() let locations = get_locations!();
for loc in locations.keys() {
let loc = locations.get(loc).unwrap();
if *loc.parent.as_ref().unwrap_or(&String::new()) == self.id {
ret.push(loc.clone());
}
}
ret
} }
// Get all children locations // Get all children locations
pub fn children_recursive(&self) -> futures::future::BoxFuture<'_, Vec<Location>> { pub fn children_recursive(&self) -> Vec<Location> {
async move {
let mut all = Vec::new(); let mut all = Vec::new();
let direct = self.children_direct().await; let direct = self.children_direct();
all.extend_from_slice(&direct); all.extend_from_slice(&direct);
for loc in direct { for loc in direct {
let sub = loc.children_recursive().await; let sub = loc.children_recursive();
all.extend_from_slice(&sub); all.extend_from_slice(&sub);
} }
all all
} }
.boxed()
}
} }
impl ToAPI for Location { impl ToAPI for Location {
async fn api(&self) -> serde_json::Value { async fn api(&self) -> serde_json::Value {
json!({ json!({
"id": self._id, "id": self.id,
"name": self.name, "name": self.name,
"parent": self.parent, "parent": self.parent,
"conditions": self.conditions_rec().await "conditions": self.conditions_rec()
}) })
} }
} }
impl Location {
pub async fn add(&mut self, id: &str) {
self._id = id.to_string();
self.insert_overwrite().await.unwrap();
}
}

View file

@ -4,6 +4,7 @@ use location::Location;
use rocket::routes as route; use rocket::routes as route;
use rocket::{http::Method, launch}; use rocket::{http::Method, launch};
use tokio::sync::OnceCell;
mod config; mod config;
mod db; mod db;
@ -16,6 +17,55 @@ mod routes;
mod transaction; mod transaction;
mod variant; mod variant;
pub static PG: OnceCell<sqlx::PgPool> = OnceCell::const_new();
#[macro_export]
macro_rules! get_pg {
() => {
if let Some(client) = $crate::PG.get() {
client
} else {
let client = sqlx::postgres::PgPoolOptions::new()
.max_connections(5)
.connect(&std::env::var("DATABASE_URL").unwrap())
.await
.unwrap();
$crate::PG.set(client).unwrap();
$crate::PG.get().unwrap()
}
};
}
pub static ITEMDB: OnceCell<db::ItemDB> = OnceCell::const_new();
#[macro_export]
macro_rules! get_itemdb {
() => {
if let Some(client) = $crate::ITEMDB.get() {
client
} else {
let itemdb = $crate::db::ItemDB::new("./itemdb").await;
$crate::ITEMDB.set(itemdb).unwrap();
$crate::ITEMDB.get().unwrap()
}
};
}
pub static LOCATIONS: OnceCell<JSONStore<Location>> = OnceCell::const_new();
#[macro_export]
macro_rules! get_locations {
() => {
if let Some(client) = $crate::LOCATIONS.get() {
client
} else {
let locations = $crate::JSONStore::new("./locations");
$crate::LOCATIONS.set(locations).unwrap();
$crate::LOCATIONS.get().unwrap()
}
};
}
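One caveat with these macros: get() followed by set() is not atomic, so two handlers racing on the first access can both build a client, and the loser's set(...).unwrap() will panic. tokio's OnceCell::get_or_init initializes at most once; a sketch of that alternative (the pg helper name is illustrative, not part of the commit):

// Same pool, initialized through get_or_init so a racing first use
// cannot hit the set(...).unwrap() panic.
async fn pg() -> &'static sqlx::PgPool {
    crate::PG
        .get_or_init(|| async {
            sqlx::postgres::PgPoolOptions::new()
                .max_connections(5)
                .connect(&std::env::var("DATABASE_URL").unwrap())
                .await
                .unwrap()
        })
        .await
}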
// ░░░░░░░░░░▀▀▀██████▄▄▄░░░░░░░░░░ // ░░░░░░░░░░▀▀▀██████▄▄▄░░░░░░░░░░
// ░░░░░░░░░░░░░░░░░▀▀▀████▄░░░░░░░ // ░░░░░░░░░░░░░░░░░▀▀▀████▄░░░░░░░
// ░░░░░░░░░░▄███████▀░░░▀███▄░░░░░ // ░░░░░░░░░░▄███████▀░░░▀███▄░░░░░
@ -49,19 +99,11 @@ async fn rocket() -> _ {
.expect("error creating CORS options"); .expect("error creating CORS options");
let config = config::get_config(); let config = config::get_config();
let itemdb = db::ItemDB::new("./itemdb").await; let itemdb = get_itemdb!();
let mut locations: JSONStore<Location> = JSONStore::new("./locations"); let locations = get_locations!();
let mut flows: JSONStore<FlowInfo> = JSONStore::new("./flows"); let flows: JSONStore<FlowInfo> = JSONStore::new("./flows");
integrity::verify_integrity(&config, &flows, &locations, &itemdb).await; integrity::verify_integrity(&config, &flows, &locations, &itemdb).await;
for location in &mut *locations {
location.1.add(location.0).await;
}
for flow in &mut *flows {
flow.1.add(flow.0).await;
}
rocket::build() rocket::build()
.mount( .mount(
"/", "/",

View file

@ -1,18 +1,16 @@
use std::collections::HashMap;
use mongod::{reference_of, vec_to_api, Model, Referencable, Sort, ToAPI};
use mongodb::bson::doc;
use rocket::{get, post, serde::json::Json, State}; use rocket::{get, post, serde::json::Json, State};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::json; use serde_json::json;
use std::{collections::HashMap, str::FromStr};
use crate::{ use crate::{
check_auth, check_auth,
config::Config, config::Config,
flow::{Flow, FlowInfo, FlowNote}, flow::{Flow, FlowInfo, FlowNote},
get_pg,
json_store::JSONStore, json_store::JSONStore,
routes::{api_error, FallibleApiResponse, Token}, routes::{api_error, vec_to_api, FallibleApiResponse, ToAPI, Token},
transaction::{Price, Transaction}, transaction::Transaction,
}; };
use super::{item::SupplyForm, ApiError}; use super::{item::SupplyForm, ApiError};
@ -42,14 +40,9 @@ pub async fn active_flows_route(
let flowinfo = flows.get(id).ok_or_else(|| api_error("Flow not found"))?; let flowinfo = flows.get(id).ok_or_else(|| api_error("Flow not found"))?;
let flow = Flow::find( let flow: Vec<Flow> = sqlx::query_as("SELECT * FROM flows WHERE kind = $1 AND ended IS NULL")
doc! { .bind(id)
"kind": flowinfo.reference(), .fetch_all(get_pg!())
"done": { "$not": { "$type": "object" } }
},
None,
None,
)
.await .await
.unwrap(); .unwrap();
@ -85,7 +78,8 @@ pub async fn create_flow(
// verify valid input transactions // verify valid input transactions
if let Some(input) = &form.input { if let Some(input) = &form.input {
for t in input { for t in input {
let t = Transaction::get(t) let t =
Transaction::get(&uuid::Uuid::from_str(t).map_err(|_| api_error("Invalid UUID"))?)
.await .await
.ok_or_else(|| api_error(&format!("No such transaction {t}")))?; .ok_or_else(|| api_error(&format!("No such transaction {t}")))?;
@ -109,14 +103,13 @@ pub async fn create_flow(
if input_ref.is_empty() { if input_ref.is_empty() {
None None
} else { } else {
Some(input_ref.iter().map(|x| x.reference()).collect()) Some(input_ref.iter().map(|x| x.id).collect())
}, },
) )
.await; .await;
for t in input_ref { for t in input_ref {
t.consume(Price::zero(), &format!("flow::{kind}::{}", flow._id)) t.consume(0.0, &format!("flow::{kind}::{}", flow.id)).await;
.await;
} }
Ok(flow) Ok(flow)
@ -139,12 +132,12 @@ pub async fn create_flow_route(
flows: &State<JSONStore<FlowInfo>>, flows: &State<JSONStore<FlowInfo>>,
) -> FallibleApiResponse { ) -> FallibleApiResponse {
let flow = create_flow(id, flows, &form).await?; let flow = create_flow(id, flows, &form).await?;
Ok(json!({"uuid": flow._id })) Ok(json!({"uuid": flow.id }))
} }
#[get("/flow/<id>")] #[get("/flow/<id>")]
pub async fn flow_api_route(id: &str) -> FallibleApiResponse { pub async fn flow_api_route(id: &str) -> FallibleApiResponse {
let flow = Flow::get(id) let flow = Flow::get(&uuid::Uuid::from_str(id).map_err(|_| api_error("Invalid UUID"))?)
.await .await
.ok_or_else(|| api_error("No such flow"))?; .ok_or_else(|| api_error("No such flow"))?;
Ok(flow.api().await) Ok(flow.api().await)
@ -152,11 +145,11 @@ pub async fn flow_api_route(id: &str) -> FallibleApiResponse {
#[post("/flow/<id>/end", data = "<form>")] #[post("/flow/<id>/end", data = "<form>")]
pub async fn end_flow_route(id: &str, form: Json<EndFlow>) -> FallibleApiResponse { pub async fn end_flow_route(id: &str, form: Json<EndFlow>) -> FallibleApiResponse {
let flow = Flow::get(id) let flow = Flow::get(&uuid::Uuid::from_str(id).map_err(|_| api_error("Invalid UUID"))?)
.await .await
.ok_or_else(|| api_error("Flow not found"))?; .ok_or_else(|| api_error("Flow not found"))?;
if flow.done.is_some() { if flow.ended.is_some() {
return Err(api_error("Flow already ended")); return Err(api_error("Flow already ended"));
} }
@ -175,17 +168,17 @@ pub async fn continue_flow_route(
flows: &State<JSONStore<FlowInfo>>, flows: &State<JSONStore<FlowInfo>>,
form: Json<CreateFlow>, form: Json<CreateFlow>,
) -> FallibleApiResponse { ) -> FallibleApiResponse {
let this_flow = Flow::get(id) let this_flow = Flow::get(&uuid::Uuid::from_str(id).map_err(|_| api_error("Invalid UUID"))?)
.await .await
.ok_or_else(|| api_error("Flow not found"))?; .ok_or_else(|| api_error("Flow not found"))?;
if this_flow.done.is_some() { if this_flow.ended.is_some() {
return Err(api_error("Flow already ended")); return Err(api_error("Flow already ended"));
} }
// create next flow // create next flow
let next_kind = flows let next_kind = flows
.get(this_flow.kind.id()) .get(&this_flow.kind)
.ok_or_else(|| api_error("Flow not found"))? .ok_or_else(|| api_error("Flow not found"))?
.next .next
.clone() .clone()
@ -195,20 +188,12 @@ pub async fn continue_flow_route(
// end current flow // end current flow
this_flow.continue_next(&next_flow).await; this_flow.continue_next(&next_flow).await;
Ok(json!({"uuid": next_flow._id})) Ok(json!({"uuid": next_flow.id}))
} }
#[get("/flow/<id>/notes")] #[get("/flow/<id>/notes")]
pub async fn flow_notes_route(id: &str) -> FallibleApiResponse { pub async fn flow_notes_route(id: &str) -> FallibleApiResponse {
let notes = FlowNote::find( let notes = FlowNote::find_of(id).await;
doc! {
"on_flow": reference_of!(Flow, id).ok_or_else(|| api_error("No such flow"))?
},
None,
Some(doc! { "timestamp": Sort::Descending }),
)
.await
.unwrap();
Ok(json!(vec_to_api(&notes).await)) Ok(json!(vec_to_api(&notes).await))
} }
@ -222,8 +207,8 @@ pub struct NoteAdd {
pub async fn create_flow_note_route(id: &str, form: Json<NoteAdd>) -> FallibleApiResponse { pub async fn create_flow_note_route(id: &str, form: Json<NoteAdd>) -> FallibleApiResponse {
let note = FlowNote::create( let note = FlowNote::create(
&form.content, &form.content,
reference_of!(Flow, id).ok_or_else(|| api_error("No such flow"))?, &uuid::Uuid::from_str(id).map_err(|_| api_error("Invalid UUID"))?,
) )
.await; .await;
Ok(json!({"uuid": note._id })) Ok(json!({"uuid": note.id }))
} }
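Every handler in this file repeats uuid::Uuid::from_str(id).map_err(|_| api_error("Invalid UUID"))?. A small helper could fold that into one place (parse_uuid is hypothetical, and this assumes api_error returns the routes' ApiError type, as its use in map_err suggests):

use std::str::FromStr;
use crate::routes::{api_error, ApiError};

// Hypothetical helper: parse a path segment into a UUID or return the usual API error.
fn parse_uuid(id: &str) -> Result<uuid::Uuid, ApiError> {
    uuid::Uuid::from_str(id).map_err(|_| api_error("Invalid UUID"))
}

Handlers would then call Flow::get(&parse_uuid(id)?) instead of inlining the conversion.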

View file

@ -1,14 +1,14 @@
use mongod::{Model, ToAPI}; use std::str::FromStr;
use rocket::serde::json::Json; use rocket::serde::json::Json;
use rocket::{get, post, State}; use rocket::{get, post, State};
use serde::Deserialize; use serde::Deserialize;
use serde_json::json; use serde_json::json;
use crate::check_auth;
use crate::config::{Config, Webhook}; use crate::config::{Config, Webhook};
use crate::item::Item; use crate::routes::{ToAPI, Token};
use crate::routes::Token;
use crate::variant::Variant; use crate::variant::Variant;
use crate::{check_auth, get_itemdb};
use crate::{ use crate::{
db::ItemDB, db::ItemDB,
routes::{api_error, FallibleApiResponse}, routes::{api_error, FallibleApiResponse},
@ -20,7 +20,7 @@ use super::{item_does_not_exist_error, variant_does_not_exist_error};
pub struct DemandForm { pub struct DemandForm {
uuid: String, uuid: String,
destination: String, destination: String,
price: String, price: f64,
} }
/// Consumes a Transaction with Price and Destination /// Consumes a Transaction with Price and Destination
@ -29,11 +29,8 @@ pub async fn demand_route(f: Json<DemandForm>, t: Token, c: &State<Config>) -> F
check_auth!(t, c); check_auth!(t, c);
let transaction = Variant::demand( let transaction = Variant::demand(
&f.uuid, &uuid::Uuid::from_str(&f.uuid).map_err(|_| api_error("Invalid UUID"))?,
f.price f.price,
.clone()
.try_into()
.map_err(|()| api_error("Price malformed"))?,
&f.destination, &f.destination,
) )
.await .await
@ -45,8 +42,8 @@ pub async fn demand_route(f: Json<DemandForm>, t: Token, c: &State<Config>) -> F
} }
if let Some(url) = &hook.item_below_minimum { if let Some(url) = &hook.item_below_minimum {
let variant = Item::get(&transaction.item) let variant = get_itemdb!()
.await .get_item(&transaction.item)
.unwrap() .unwrap()
.variant(&transaction.variant) .variant(&transaction.variant)
.unwrap(); .unwrap();

View file

@ -1,6 +1,5 @@
use std::collections::HashMap; use std::collections::HashMap;
use mongod::{vec_to_api, ToAPI};
use rocket::{get, State}; use rocket::{get, State};
use serde_json::json; use serde_json::json;
@ -9,7 +8,7 @@ use crate::{
config::Config, config::Config,
json_store::JSONStore, json_store::JSONStore,
location::Location, location::Location,
routes::{api_error, FallibleApiResponse, Token}, routes::{api_error, vec_to_api, FallibleApiResponse, ToAPI, Token},
transaction::Transaction, transaction::Transaction,
}; };
@ -121,11 +120,6 @@ pub async fn location_inventory(
} }
Ok(json!( Ok(json!(
vec_to_api( vec_to_api(&Transaction::in_location(location).await).await
&Transaction::in_location(location)
.await
.ok_or_else(|| api_error("No such location"))?
)
.await
)) ))
} }

View file

@ -4,12 +4,11 @@ mod location;
mod stat; mod stat;
mod supply; mod supply;
use std::str::FromStr;
pub use demand::*; pub use demand::*;
pub use error::*; pub use error::*;
pub use location::*; pub use location::*;
use mongod::reference_of;
use mongod::Model;
use mongod::ToAPI;
use rocket::post; use rocket::post;
use rocket::serde::json::Json; use rocket::serde::json::Json;
use serde::Deserialize; use serde::Deserialize;
@ -25,7 +24,9 @@ use crate::check_auth;
use crate::config::Config; use crate::config::Config;
use crate::db::get_items_without_min_satisfied; use crate::db::get_items_without_min_satisfied;
use crate::db::ItemDB; use crate::db::ItemDB;
use crate::location::Location; use crate::get_locations;
use crate::get_pg;
use crate::routes::ToAPI;
use crate::routes::Token; use crate::routes::Token;
use crate::transaction::Transaction; use crate::transaction::Transaction;
@ -91,7 +92,9 @@ pub async fn transaction_route(
) -> FallibleApiResponse { ) -> FallibleApiResponse {
check_auth!(t, c); check_auth!(t, c);
let t = Transaction::get(transaction) let t = Transaction::get(
&uuid::Uuid::from_str(&transaction).map_err(|_| api_error("Invalid UUID"))?,
)
.await .await
.ok_or_else(|| api_error("No transaction with this UUID"))?; .ok_or_else(|| api_error("No transaction with this UUID"))?;
Ok(t.api().await) Ok(t.api().await)
@ -184,16 +187,16 @@ pub struct MoveTransaction {
#[post("/transaction/<id>/move", data = "<form>")] #[post("/transaction/<id>/move", data = "<form>")]
pub async fn move_transaction_route(id: &str, form: Json<MoveTransaction>) -> FallibleApiResponse { pub async fn move_transaction_route(id: &str, form: Json<MoveTransaction>) -> FallibleApiResponse {
let new_loc = &form.to; let new_loc = &form.to;
Transaction::get(id) let locations = get_locations!();
.await
.ok_or_else(|| api_error("No such transaction"))? if !locations.contains_key(new_loc) {
.change() return Err(api_error("No such location"));
.location(if form.to.is_empty() { }
None
} else { sqlx::query("UPDATE transactions SET location = $1 WHERE id = $2")
Some(reference_of!(Location, new_loc).ok_or_else(|| api_error("No such location"))?) .bind(new_loc)
}) .bind(uuid::Uuid::from_str(id).map_err(|_| api_error("Invalid UUID"))?)
.update() .execute(get_pg!())
.await .await
.unwrap(); .unwrap();

View file

@ -73,7 +73,7 @@ pub async fn item_stat_route(
let item_var = itemdb.get_item(&item).unwrap().variant(var).unwrap(); let item_var = itemdb.get_item(&item).unwrap().variant(var).unwrap();
for t in item_var.inventory().await { for t in item_var.inventory().await {
transaction_count += 1; transaction_count += 1;
total_price += t.price.value; total_price += t.price;
} }
} }
} }

View file

@ -1,4 +1,3 @@
use mongod::ToAPI;
use rocket::serde::json::Json; use rocket::serde::json::Json;
use rocket::{get, post, State}; use rocket::{get, post, State};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -6,7 +5,7 @@ use serde_json::json;
use crate::check_auth; use crate::check_auth;
use crate::config::{Config, Webhook}; use crate::config::{Config, Webhook};
use crate::routes::Token; use crate::routes::{vec_to_api, ToAPI, Token};
use crate::{ use crate::{
db::ItemDB, db::ItemDB,
routes::{api_error, FallibleApiResponse}, routes::{api_error, FallibleApiResponse},
@ -18,7 +17,7 @@ use super::{item_does_not_exist_error, variant_does_not_exist_error};
pub struct SupplyForm { pub struct SupplyForm {
pub item: String, pub item: String,
pub variant: String, pub variant: String,
pub price: String, pub price: f64,
pub origin: Option<String>, pub origin: Option<String>,
pub location: Option<String>, pub location: Option<String>,
pub note: Option<String>, pub note: Option<String>,
@ -42,10 +41,7 @@ pub async fn supply_route(
let transaction = variant let transaction = variant
.supply( .supply(
form.price form.price,
.clone()
.try_into()
.map_err(|()| api_error("Price malformed"))?,
form.origin.as_deref(), form.origin.as_deref(),
form.location.as_deref(), form.location.as_deref(),
form.note.as_deref(), form.note.as_deref(),
@ -58,7 +54,7 @@ pub async fn supply_route(
} }
} }
Ok(json!({"uuid": transaction._id})) Ok(json!({"uuid": transaction.id}))
} }
/// Returns a list of Transaction UUIDs for the Item Variant /// Returns a list of Transaction UUIDs for the Item Variant
@ -104,7 +100,7 @@ pub async fn inventory_route(
item.inventory().await item.inventory().await
}; };
Ok(json!(mongod::vec_to_api(&transactions).await)) Ok(json!(vec_to_api(&transactions).await))
} }
/// Returns current active Transactions for Item Variant /// Returns current active Transactions for Item Variant
@ -126,7 +122,7 @@ pub async fn inventory_route_variant(
let transactions = variant.inventory().await; let transactions = variant.inventory().await;
Ok(json!(mongod::vec_to_api(&transactions).await)) Ok(json!(vec_to_api(&transactions).await))
} }
/// Returns statistics for the Item Variant /// Returns statistics for the Item Variant

View file

@ -37,3 +37,20 @@ impl<'r> FromRequest<'r> for Token {
} }
} }
} }
/// A trait to generate a Model API representation in JSON format.
pub trait ToAPI: Sized {
/// Generate public API JSON
fn api(&self) -> impl std::future::Future<Output = serde_json::Value>;
}
/// Converts a slice of items implementing the `ToAPI` trait into a `Vec` of JSON values.
pub async fn vec_to_api(items: &[impl ToAPI]) -> Vec<serde_json::Value> {
let mut ret = Vec::with_capacity(items.len());
for e in items {
ret.push(e.api().await);
}
ret
}
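Any type can opt into the same JSON surface. A minimal example implementation (Widget is made up; the async fn form matches how the existing models implement the trait):

use serde_json::json;
use crate::routes::{vec_to_api, ToAPI};

// Illustrative only: a made-up type exposing a public JSON view.
struct Widget {
    id: uuid::Uuid,
    name: String,
}

impl ToAPI for Widget {
    async fn api(&self) -> serde_json::Value {
        json!({ "uuid": self.id, "name": self.name })
    }
}

// vec_to_api then turns a slice of them into a JSON array body.
async fn widgets_json(widgets: &[Widget]) -> serde_json::Value {
    json!(vec_to_api(widgets).await)
}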

View file

@ -1,109 +1,85 @@
use futures::StreamExt; use futures::StreamExt;
use mongod::{
assert_reference_of,
derive::{Model, Referencable},
reference_of, Model, Referencable, Reference, Sort, Validate,
};
use mongodb::bson::doc;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::json; use serde_json::json;
use sqlx::prelude::FromRow;
use crate::{item::Item, location::Location}; use crate::{get_itemdb, get_locations, get_pg, routes::ToAPI};
// todo : produced / consumed by flow field? // todo : produced / consumed by flow field?
/// A Transaction of an Item Variant /// A Transaction of an Item Variant
#[derive(Debug, Clone, Serialize, Deserialize, Model, Referencable)] #[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct Transaction { pub struct Transaction {
/// UUID /// UUID
pub _id: String, pub id: uuid::Uuid,
/// Associated Item /// Associated Item
pub item: String, pub item: String,
/// Associated Variant /// Associated Variant
pub variant: String, pub variant: String,
/// Price of obtaining the Item /// Price of obtaining the Item
pub price: Price, pub price: f64,
/// Origin of the Item /// Origin of the Item
pub origin: Option<String>, pub origin: Option<String>,
/// The location of the Item /// The location of the Item
pub location: Option<Reference>, pub location: Option<String>,
/// Info on consumption of the Item
pub consumed: Option<Consumed>,
/// Notes on Transaction /// Notes on Transaction
pub note: Option<String>, pub note: Option<String>,
/// Timestamp of the Transaction /// Timestamp of the Transaction
pub timestamp: i64, pub created: chrono::DateTime<chrono::Utc>,
}
impl Validate for Transaction {
async fn validate(&self) -> Result<(), String> {
if let Some(location) = &self.location {
assert_reference_of!(location, Location);
}
Ok(())
}
}
/// Information about consumed Items
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct Consumed {
/// Destination of the Item or who consumed it /// Destination of the Item or who consumed it
pub destination: String, pub destination: Option<String>,
/// Price the Item was exported or consumed at /// Price the Item was exported or consumed at
pub price: Price, pub consumed_price: Option<f64>,
/// Timestamp of Consumption /// Timestamp of Consumption
pub timestamp: i64, pub consumed_timestamp: Option<chrono::DateTime<chrono::Utc>>,
} }
impl Transaction { impl Transaction {
pub async fn new( pub async fn new(
item: &str, item: &str,
variant: &str, variant: &str,
price: Price, price: f64,
origin: Option<&str>, origin: Option<&str>,
location: Option<&str>, location: Option<&str>,
note: Option<&str>, note: Option<&str>,
) -> Self { ) -> Self {
Self { sqlx::query_as("INSERT INTO transactions (item, variant, price, origin, location, note) VALUES ($1, $2, $3, $4, $5, $6) RETURNING *")
_id: uuid::Uuid::new_v4().to_string(), .bind(item)
item: item.to_string(), .bind(variant)
variant: variant.to_string(), .bind(price)
price, .bind(origin)
consumed: None, .bind(location)
origin: origin.map(std::string::ToString::to_string), .bind(note)
location: if let Some(location) = location { .fetch_one(get_pg!()).await.unwrap()
reference_of!(Location, location)
} else {
None
},
note: note.map(|x| x.to_string()),
timestamp: chrono::Utc::now().timestamp(),
}
} }
/// Consumes the Item with `price` and `destination` pub async fn get(id: &uuid::Uuid) -> Option<Self> {
pub async fn consume(self, price: Price, destination: &str) -> Self { sqlx::query_as("SELECT * FROM transactions WHERE id = $1")
self.change() .bind(id)
.consumed(Some(Consumed { .fetch_optional(get_pg!())
destination: destination.to_string(),
price,
timestamp: chrono::Utc::now().timestamp(),
}))
.update()
.await .await
.unwrap() .unwrap()
} }
/// Consumes the Item with `price` and `destination`
pub async fn consume(self, price: f64, destination: &str) -> Self {
sqlx::query_as(
"UPDATE transactions SET consumed_timestamp = current_timestamp, consumed_price = $1, destination = $2 WHERE id = $3 RETURNING *")
.bind(price)
.bind(destination)
.bind(&self.id)
.fetch_one(get_pg!()).await.unwrap()
}
pub async fn is_expired_at(&self, time: i64) -> bool { pub async fn is_expired_at(&self, time: i64) -> bool {
if let Some(expiry) = Item::get(&self.item) if let Some(expiry) = get_itemdb!()
.await .get_item(&self.item)
.unwrap() .unwrap()
.variant(&self.variant) .variant(&self.variant)
.unwrap() .unwrap()
.expiry .expiry
{ {
let date_added = self.timestamp; let date_added = self.created.timestamp();
let expiration_ts = expiry * 24 * 60 * 60; let expiration_ts = expiry * 24 * 60 * 60;
@ -120,15 +96,16 @@ impl Transaction {
} }
pub async fn is_expired(&self) -> bool { pub async fn is_expired(&self) -> bool {
if self.consumed.is_some() { if self.consumed_timestamp.is_some() {
if let Some(expiry) = Item::get(&self.item) if let Some(expiry) = get_itemdb!()
.await .get_item(&self.item)
.unwrap() .unwrap()
.variant(&self.variant) .variant(&self.variant)
.unwrap() .unwrap()
.expiry .expiry
{ {
let time_around = self.timestamp - self.consumed.as_ref().unwrap().timestamp; let time_around =
self.created.timestamp() - self.consumed_timestamp.unwrap().timestamp();
let expiration_ts = expiry * 24 * 60 * 60; let expiration_ts = expiry * 24 * 60 * 60;
return time_around > expiration_ts; return time_around > expiration_ts;
} else { } else {
@ -140,38 +117,24 @@ impl Transaction {
self.is_expired_at(current_time).await self.is_expired_at(current_time).await
} }
pub async fn in_location(l: &str) -> Option<Vec<Self>> { pub async fn in_location(l: &str) -> Vec<Self> {
let l = reference_of!(Location, l)?; sqlx::query_as(
Some( "SELECT * FROM transactions WHERE location = $1 AND consumed_timestamp IS NULL",
Self::find(
doc! { "location": l, "consumed": { "$not": { "$type": "object" } }},
None,
None,
) )
.bind(l)
.fetch_all(get_pg!())
.await .await
.unwrap(), .unwrap()
)
} }
pub async fn in_location_recursive(l: &str) -> Option<Vec<Self>> { pub async fn in_location_recursive(l: &str) -> Option<Vec<Self>> {
// get the children of this location // get the children of this location
let locations = Location::get(l).await?.children_recursive().await; let locations = get_locations!().get(l)?.children_recursive();
let l = reference_of!(Location, l)?; let mut transactions = Self::in_location(l).await;
let mut transactions = Self::find(
doc! { "location": l, "consumed": { "$not": { "$type": "object" } },},
None,
None,
)
.await
.unwrap();
for loc in locations { for loc in locations {
transactions.extend( transactions.extend(Self::in_location(&loc.id).await);
Self::find(doc! { "location": loc.reference(), "consumed": { "$not": { "$type": "object" } }}, None, None)
.await
.unwrap(),
);
} }
Some(transactions) Some(transactions)
@ -179,13 +142,10 @@ impl Transaction {
/// Get all Transactions which are not consumed and are expired /// Get all Transactions which are not consumed and are expired
pub async fn active_expired(days: Option<i64>) -> Vec<Self> { pub async fn active_expired(days: Option<i64>) -> Vec<Self> {
let items = Self::find( let items: Vec<Self> = sqlx::query_as(
doc! { "SELECT * FROM transactions WHERE consumed_timestamp IS NULL ORDER BY created DESC",
"consumed": { "$not": { "$type": "object" } }
},
None,
Some(doc! { "timestamp": Sort::Descending }),
) )
.fetch_all(get_pg!())
.await .await
.unwrap(); .unwrap();
@ -210,67 +170,35 @@ impl Transaction {
} }
} }
impl mongod::ToAPI for Transaction { impl ToAPI for Transaction {
async fn api(&self) -> serde_json::Value { async fn api(&self) -> serde_json::Value {
let location = if let Some(loc) = &self.location { let location = if let Some(loc) = &self.location {
Some(loc.get::<Location>().await.api().await) Some(get_locations!().get(loc).unwrap().api().await)
} else {
None
};
let consumed = if self.consumed_timestamp.is_some() {
Some(json!({
"destination": self.destination,
"price": self.consumed_price,
"timestamp": self.consumed_timestamp.unwrap().timestamp()
}))
} else { } else {
None None
}; };
json!({ json!({
"uuid": self._id, "uuid": self.id,
"item": self.item, "item": self.item,
"variant": self.variant, "variant": self.variant,
"price": self.price, "price": self.price,
"origin": self.origin, "origin": self.origin,
"location": location, "location": location,
"timestamp": self.timestamp, "timestamp": self.created.timestamp(),
"consumed": self.consumed, "consumed": consumed,
"note": self.note, "note": self.note,
"expired": self.is_expired().await "expired": self.is_expired().await
}) })
} }
} }
/// Economic Price
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct Price {
/// Value of the currency
pub value: f64,
/// Kind of currency
pub currency: String,
}
impl Price {
pub fn new(value: f64, currency: &str) -> Self {
Self {
value,
currency: currency.to_string(),
}
}
pub fn zero() -> Self {
Self {
value: 0.00,
currency: String::new(),
}
}
fn parse(price: &str) -> Option<Self> {
let (value, currency) = price.split_once(' ')?;
Some(Self {
value: value.parse().ok()?,
currency: currency.to_string(),
})
}
}
impl TryFrom<String> for Price {
type Error = ();
fn try_from(value: String) -> Result<Self, Self::Error> {
Self::parse(&value).ok_or(())
}
}
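With the Price struct gone, prices are plain f64 columns and consumption is just three nullable columns on the same row. An illustrative round trip, assuming the schema from the migration above (the item and variant ids are made up):

use crate::transaction::Transaction;

// Supply a unit, then consume it; both calls go straight to Postgres.
async fn example_transaction() {
    let t = Transaction::new("flour", "1kg", 2.5, Some("mill"), None, None).await;
    let consumed = t.consume(0.0, "kitchen").await;
    assert!(consumed.consumed_timestamp.is_some());
    assert_eq!(consumed.destination.as_deref(), Some("kitchen"));
}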

View file

@ -1,16 +1,9 @@
use std::collections::HashMap; use std::collections::HashMap;
use futures::StreamExt;
use mongod::{Model, Sort};
use mongodb::bson::doc;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::json; use serde_json::json;
use crate::transaction::{Price, Transaction}; use crate::{get_pg, transaction::Transaction};
pub fn sort_by_timestamp() -> mongodb::bson::Document {
doc! { "timestamp": mongod::Sort::Descending }
}
pub fn timestamp_range(year: i32, month: u32) -> (i64, i64) { pub fn timestamp_range(year: i32, month: u32) -> (i64, i64) {
let d = chrono::NaiveDate::from_ymd_opt(year, month, 1).unwrap(); let d = chrono::NaiveDate::from_ymd_opt(year, month, 1).unwrap();
@ -85,69 +78,51 @@ impl Variant {
/// Returns the IDs of Transactions from this Item Variant. /// Returns the IDs of Transactions from this Item Variant.
pub async fn supply_log(&self) -> Vec<String> { pub async fn supply_log(&self) -> Vec<String> {
let filter = doc! { let res: Vec<(uuid::Uuid,)> = sqlx::query_as(
"item": &self.item, "SELECT id FROM transactions WHERE item = $1 AND variant = $2 ORDER BY created DESC",
"variant": &self.variant )
}; .bind(&self.item)
.bind(&self.variant)
let result = Transaction::find_partial(filter, json!({}), None, None) .fetch_all(get_pg!())
.await .await
.unwrap(); .unwrap();
let mut ret = Vec::new(); res.into_iter().map(|x| x.0.to_string()).collect()
for doc in result {
ret.push(doc._id);
}
ret
} }
/// Returns the active Transaction of this Item Variant which are not yet consumed. /// Returns the active Transaction of this Item Variant which are not yet consumed.
pub async fn inventory(&self) -> Vec<Transaction> { pub async fn inventory(&self) -> Vec<Transaction> {
let filter = doc! { sqlx::query_as("SELECT * FROM transactions WHERE item = $1 AND variant = $2 AND consumed_timestamp IS NULL ORDER BY created DESC")
"item": &self.item, .bind(&self.item)
"variant": &self.variant, .bind(&self.variant)
"consumed": { "$not": { "$type": "object" } } .fetch_all(get_pg!()).await.unwrap()
};
Transaction::find(filter, None, None).await.unwrap()
} }
/// Returns the IDs of the Transactions from this Item Variant which are consumed. /// Returns the IDs of the Transactions from this Item Variant which are consumed.
pub async fn demand_log(&self, destination: Option<&str>) -> Vec<String> { pub async fn demand_log(&self, destination: Option<&str>) -> Vec<String> {
let filter = if let Some(dest) = destination { let res: Vec<(uuid::Uuid,)> = if let Some(destination) = destination {
doc! { sqlx::query_as(
"item": &self.item, "SELECT id FROM transactions WHERE item = $1 AND variant = $2 AND consumed_timestamp IS NOT NULL AND destination = $3 ORDER BY created DESC"
"variant": &self.variant, )
"consumed": { "destination": dest } .bind(&self.item)
} .bind(&self.variant)
.bind(destination)
.fetch_all(get_pg!()).await.unwrap()
} else { } else {
doc! { sqlx::query_as(
"item": &self.item, "SELECT id FROM transactions WHERE item = $1 AND variant = $2 AND consumed_timestamp IS NOT NULL ORDER BY created DESC"
"variant": &self.variant, )
"consumed": { "$type": "object" } .bind(&self.item)
} .bind(&self.variant)
.fetch_all(get_pg!()).await.unwrap()
}; };
res.into_iter().map(|x| x.0.to_string()).collect()
let result = Transaction::find_partial(filter, json!({}), None, None)
.await
.unwrap();
let mut ret = Vec::new();
for doc in result {
ret.push(doc._id);
} }
ret pub async fn demand(uuid: &uuid::Uuid, price: f64, destination: &str) -> Option<Transaction> {
}
pub async fn demand(uuid: &str, price: Price, destination: &str) -> Option<Transaction> {
// check if transaction exists // check if transaction exists
let mut t = Transaction::get(uuid).await?; let t = Transaction::get(uuid).await?;
t = t.consume(price, destination).await; Some(t.consume(price, destination).await)
Some(t)
} }
/// Records a supply transaction in the database. /// Records a supply transaction in the database.
@ -162,26 +137,22 @@ impl Variant {
/// Returns a UUID string representing the transaction. /// Returns a UUID string representing the transaction.
pub async fn supply( pub async fn supply(
&self, &self,
price: Price, price: f64,
origin: Option<&str>, origin: Option<&str>,
location: Option<&str>, location: Option<&str>,
note: Option<&str>, note: Option<&str>,
) -> Transaction { ) -> Transaction {
let t = Transaction::new(&self.item, &self.variant, price, origin, location, note).await; Transaction::new(&self.item, &self.variant, price, origin, location, note).await
t.insert().await.unwrap();
t
} }
/// Returns all Transactions of this Item Variant /// Returns all Transactions of this Item Variant
pub async fn get_all_transactions(&self) -> Vec<Transaction> { pub async fn get_all_transactions(&self) -> Vec<Transaction> {
let filter = doc! { sqlx::query_as(
"item": &self.item, "SELECT * FROM transactions WHERE item = $1 AND variant = $2 ORDER BY created DESC",
"variant": &self.variant )
}; .bind(&self.item)
.bind(&self.variant)
Transaction::find(filter, None, Some(doc! { "timestamp": Sort::Descending })) .fetch_all(get_pg!())
.await .await
.unwrap() .unwrap()
} }
@ -189,80 +160,57 @@ impl Variant {
pub async fn get_transaction_timeslice(&self, year: i32, month: u32) -> Vec<Transaction> { pub async fn get_transaction_timeslice(&self, year: i32, month: u32) -> Vec<Transaction> {
let (start, end) = timestamp_range(year, month); let (start, end) = timestamp_range(year, month);
Transaction::find( sqlx::query_as("SELECT * FROM transactions WHERE created BETWEEN to_timestamp($1) AND to_timestamp($2) ORDER BY created DESC")
doc! { .bind(start)
"timestamp": { .bind(end)
"$gte": start, .fetch_all(get_pg!()).await.unwrap()
"$lte": end
}
},
None,
Some(sort_by_timestamp()),
)
.await
.unwrap()
} }
pub async fn get_unique_origins(&self) -> Vec<String> { pub async fn get_unique_origins(&self) -> Vec<String> {
unique_flows( let res: Vec<(String,)> = sqlx::query_as("SELECT DISTINCT(origin) FROM transactions WHERE origin NOT LIKE 'flow::%' AND item = $1 AND variant = $2")
&Transaction::unique( .bind(&self.item)
doc! { .bind(&self.variant)
"item": &self.item, .fetch_all(get_pg!()).await.unwrap();
"variant": &self.variant res.into_iter().map(|x| x.0).collect()
},
"origin",
)
.await,
)
} }
pub async fn get_unique_destinations(&self) -> Vec<String> { pub async fn get_unique_destinations(&self) -> Vec<String> {
unique_flows( let res: Vec<(String,)> = sqlx::query_as("SELECT DISTINCT(destination) FROM transactions WHERE destination NOT LIKE 'flow::%' AND item = $1 AND variant = $2")
&Transaction::unique( .bind(&self.item)
doc! { .bind(&self.variant)
"item": &self.item, .fetch_all(get_pg!()).await.unwrap();
"variant": &self.variant res.into_iter().map(|x| x.0).collect()
},
"consumed.destination",
)
.await,
)
} }
pub async fn price_history_by_origin(&self, origin: &str, limit: Option<i64>) -> Vec<Price> { pub async fn price_history_by_origin(&self, origin: &str, limit: Option<i64>) -> Vec<f64> {
Transaction::find( let res: Vec<(f64,)> = sqlx::query_as(
doc! { &format!("SELECT price FROM transactions WHERE item = $1 AND variant = $2 AND origin = $3 ORDER BY created DESC {}", if let Some(limit) = limit {
"item": &self.item, format!("LIMIT {limit}")
"variant": &self.variant, } else { String::new() })
"origin": origin
},
limit,
Some(sort_by_timestamp()),
) )
.await .bind(&self.item)
.unwrap() .bind(&self.variant)
.into_iter() .bind(origin)
.map(|x| x.price) .fetch_all(get_pg!()).await.unwrap();
.collect() res.into_iter().map(|x| x.0).collect()
} }
pub async fn get_latest_price(&self, origin: Option<String>) -> Price { pub async fn get_latest_price(&self, origin: Option<String>) -> f64 {
let mut filter = doc! {
"item": &self.item,
"variant": &self.variant
};
if let Some(origin) = origin { if let Some(origin) = origin {
filter.insert("origin", origin); let res: (f64,) = sqlx::query_as("SELECT price FROM transactions WHERE item = $1 AND variant = $2 AND origin = $3 ORDER BY created DESC LIMIT 1")
.bind(&self.item)
.bind(&self.variant)
.bind(origin)
.fetch_one(get_pg!()).await.unwrap();
res.0
} else {
let res: (f64,) = sqlx::query_as("SELECT price FROM transactions WHERE item = $1 AND variant = $2 ORDER BY created DESC LIMIT 1")
.bind(&self.item)
.bind(&self.variant)
.fetch_one(get_pg!()).await.unwrap();
res.0
} }
Transaction::find(filter, Some(1), Some(sort_by_timestamp()))
.await
.unwrap()
.first()
.unwrap()
.price
.clone()
} }
/// Check if item variant is below minimum. Returns if this is the case and the number needed to fulfill minimum /// Check if item variant is below minimum. Returns if this is the case and the number needed to fulfill minimum
@ -280,8 +228,7 @@ impl Variant {
pub async fn stat(&self, full: bool) -> serde_json::Value { pub async fn stat(&self, full: bool) -> serde_json::Value {
let active_transactions = self.inventory().await; let active_transactions = self.inventory().await;
// fix : ignores currency let total_price: f64 = active_transactions.iter().map(|x| x.price).sum();
let total_price: f64 = active_transactions.iter().map(|x| x.price.value).sum();
if !full { if !full {
return json!({ return json!({
@ -290,13 +237,15 @@ impl Variant {
}); });
} }
let all_transactions = Transaction::find( let all_transactions: Vec<Transaction> = sqlx::query_as(
doc! { "item": &self.item, "variant": &self.variant}, "SELECT * FROM transactions WHERE item = $1 AND variant = $2 ORDER BY created DESC",
None,
None,
) )
.bind(&self.item)
.bind(&self.variant)
.fetch_all(get_pg!())
.await .await
.unwrap(); .unwrap();
let mut expired_count = 0.0; let mut expired_count = 0.0;
for t in &all_transactions { for t in &all_transactions {
@ -318,7 +267,7 @@ impl Variant {
.price_history_by_origin(&origin, None) .price_history_by_origin(&origin, None)
.await .await
.into_iter() .into_iter()
.map(|x| x.value) .map(|x| x)
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let prices_len = prices.len() as f64; let prices_len = prices.len() as f64;
let prices_summed = prices.into_iter().reduce(|acc, e| acc + e).unwrap_or(0.0); let prices_summed = prices.into_iter().reduce(|acc, e| acc + e).unwrap_or(0.0);
@ -349,29 +298,3 @@ impl Variant {
}) })
} }
} }
pub fn unique_flows(i: &[String]) -> Vec<String> {
let mut unique_vec: Vec<String> = Vec::new();
for s in i {
// Check if the string starts with "flow::"
if let Some(suffix) = s.strip_prefix("flow::") {
// Extract the part after "flow::" and split on "::" to get the kind (ignoring id)
let parts: Vec<&str> = suffix.split("::").collect();
if let Some(kind) = parts.first() {
// Build the common prefix "flow::kind"
let common_prefix = format!("flow::{}", kind);
if !unique_vec.contains(&common_prefix) {
unique_vec.push(common_prefix);
}
}
} else {
// If the string doesn't start with "flow::", retain it
// Assumption: Except "flow::" values, everything should be already unique
unique_vec.push(s.to_string());
}
}
unique_vec
}
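Since prices now live in Postgres, aggregates like the per-origin average that stat() computes in Rust could also be pushed into SQL. A sketch, assuming the column types from the migration above (avg_price_by_origin is illustrative, not part of the commit):

use crate::get_pg;

// AVG over the double precision price column, or None if there are no rows.
async fn avg_price_by_origin(item: &str, variant: &str, origin: &str) -> Option<f64> {
    let row: (Option<f64>,) = sqlx::query_as(
        "SELECT AVG(price) FROM transactions WHERE item = $1 AND variant = $2 AND origin = $3",
    )
    .bind(item)
    .bind(variant)
    .bind(origin)
    .fetch_one(get_pg!())
    .await
    .unwrap();
    row.0
}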