This commit is contained in:
JMARyA 2024-10-05 01:21:43 +02:00
parent 311b315990
commit 1979fc246e
Signed by: jmarya
GPG key ID: 901B2ADDF27C2263
16 changed files with 1644 additions and 1391 deletions

4
.gitignore vendored
View file

@@ -1 +1,3 @@
/target
/target
/videos
/db

1842
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@@ -3,22 +3,20 @@ name = "watchdogs"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
actix-files = "0.6.2"
actix-web = "4.4.0"
chrono = "0.4.31"
env_logger = "0.10.0"
hex = "0.4.3"
log = "0.4.20"
maud = "0.25.0"
rayon = "1.7.0"
regex = "1.9.5"
ring = "0.16.20"
rusqlite = "0.29.0"
serde = { version = "1.0.188", features = ["derive"] }
serde_json = "1.0.107"
tokio = { version = "1.32.0", features = ["full"] }
walkdir = "2.4.0"
web-base = "0.2.1"
chrono = { version = "0.4.38", features = ["serde"] }
futures = "0.3.30"
log = "0.4.20"
rocket = { version = "0.5.1", features = ["json"] }
rocket_cors = "0.6.0"
serde = { version = "1.0.195", features = ["derive"] }
serde_json = "1.0.111"
tokio = { version = "1.35.1", features = ["full"] }
uuid = { version = "1.8.0", features = ["v4", "serde"] }
sqlx = { version = "0.8", features = ["postgres", "runtime-tokio-native-tls", "derive", "uuid", "chrono", "json"] }

View file

@ -5,7 +5,7 @@ WORKDIR /app
RUN cargo build --release
FROM ubuntu
FROM debian:buster
RUN apt-get update && apt-get upgrade -y
RUN apt-get install -y ca-certificates openssl mkvtoolnix sqlite3

27
docker-compose.yml Normal file
View file

@@ -0,0 +1,27 @@
version: '3'

services:
  # Application container, built from the local Dockerfile.
  watchdogs:
    build: .
    ports:
      # Rocket listens on 8000 inside the container; exposed as 8080 on the host.
      - "8080:8000"
    depends_on:
      - postgres
    volumes:
      - ./videos:/videos # Video files
    environment:
      - "DATABASE_URL=postgres://user:pass@postgres/watchdogs"
      - "RUST_LOG=info"
      # Bind to all interfaces so the published port is reachable from outside the container.
      - "ROCKET_ADDRESS=0.0.0.0"
    command: "/watchdogs /videos"

  # PostgreSQL database (TimescaleDB image, PostgreSQL 16).
  postgres:
    image: timescale/timescaledb:latest-pg16
    restart: always
    ports:
      - 5432:5432
    volumes:
      # Persist database state on the host.
      - ./db:/var/lib/postgresql/data/
    environment:
      - POSTGRES_USER=user
      - POSTGRES_PASSWORD=pass
      - POSTGRES_DB=watchdogs

View file

@ -6,27 +6,31 @@ CREATE TABLE IF NOT EXISTS "youtube_meta" (
"uploader_id" TEXT,
"duration" INTEGER,
"views" INTEGER,
"upload_date" TEXT,
"upload_date" DATE,
PRIMARY KEY("id")
);
CREATE TABLE IF NOT EXISTS "youtube_meta_tags" (
"youtube_id" TEXT NOT NULL,
"youtube_id" TEXT NOT NULL,
"tag" TEXT NOT NULL,
PRIMARY KEY("tag","youtube_id"),
PRIMARY KEY("youtube_id", "tag"),
FOREIGN KEY("youtube_id") REFERENCES "youtube_meta"("id")
);
CREATE TABLE IF NOT EXISTS "youtube_meta_categories" (
"youtube_id" TEXT NOT NULL,
"category" TEXT NOT NULL,
PRIMARY KEY("youtube_id", "category"),
FOREIGN KEY("youtube_id") REFERENCES "youtube_meta"("id")
);
CREATE TABLE IF NOT EXISTS "videos" (
"sha256" TEXT NOT NULL,
"directory" TEXT NOT NULL,
"path" TEXT NOT NULL,
"duration" INTEGER,
"title" TEXT,
"youtube_id" TEXT,
"youtube_id" TEXT,
PRIMARY KEY("sha256"),
FOREIGN KEY("youtube_id") REFERENCES "youtube_meta"("id")
);
CREATE TABLE IF NOT EXISTS "youtube_meta_categories" (
"youtube_id" TEXT NOT NULL,
"category" TEXT NOT NULL,
PRIMARY KEY("category","youtube_id"),
FOREIGN KEY("youtube_id") REFERENCES "youtube_meta"("id")
);

View file

@ -1,10 +1,5 @@
use rayon::prelude::IntoParallelIterator;
use rayon::prelude::ParallelIterator;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use walkdir::WalkDir;
use func::is_video_file;
@ -12,232 +7,193 @@ pub use video::Video;
mod func;
mod video;
#[macro_export]
macro_rules! query_row_map {
($db:ident, $query:expr, $param:expr, $map_fn:expr) => {{
let mut state = $db.prepare($query).unwrap();
let r: Vec<_> = state
.query_map($param, $map_fn)
.unwrap()
.flatten()
.collect();
r
}};
}
#[derive(Debug, Clone)]
pub struct Library {
conn: Arc<Mutex<rusqlite::Connection>>,
conn: sqlx::PgPool,
}
impl Library {
pub fn new() -> Self {
pub async fn new() -> Self {
log::info!("Creating database connection");
let conn = Arc::new(Mutex::new(rusqlite::Connection::open("videos.db").unwrap()));
let conn = sqlx::postgres::PgPoolOptions::new()
.max_connections(5)
.connect(&std::env::var("DATABASE_URL").unwrap())
.await
.unwrap();
let s = Self { conn };
s.init_schema();
sqlx::migrate!("./migrations").run(&s.conn).await.unwrap();
s
}
}
// DB
impl Library {
pub fn init_schema(&self) {
let mut con = self.conn.lock().unwrap();
let tx = con.transaction().unwrap();
tx.execute_batch(include_str!("../schema.sql")).unwrap();
tx.commit().unwrap();
}
}
// Functions
impl Library {
// directories
pub fn get_directories(&self) -> Vec<String> {
let db = self.conn.lock().unwrap();
query_row_map!(db, "SELECT DISTINCT directory FROM videos;", [], |x| {
x.get::<usize, String>(0)
})
pub async fn get_directories(&self) -> Vec<String> {
let res: Vec<(String,)> = sqlx::query_as("SELECT DISTINCT(directory) FROM videos;")
.fetch_all(&self.conn)
.await
.unwrap();
res.into_iter().map(|x| x.0).collect()
}
pub fn get_directory_videos(&self, dir: &str) -> Vec<Video> {
let db = self.conn.lock().unwrap();
pub async fn get_directory_videos(&self, dir: &str) -> Vec<Video> {
let videos_ids: Vec<(String, )> = sqlx::query_as(
"SELECT sha256 FROM videos INNER JOIN youtube_meta ON youtube_meta.id = videos.youtube_id WHERE directory = ?1 ORDER BY youtube_meta.upload_date DESC;")
.bind(dir)
.fetch_all(&self.conn).await.unwrap();
let videos: Vec<_> = query_row_map!(
db,
"SELECT sha256 FROM videos INNER JOIN youtube_meta ON youtube_meta.id = videos.youtube_id WHERE directory = ?1 ORDER BY youtube_meta.upload_date DESC;",
&[dir],
|x| {
Ok(Video::from_hash(
&x.get::<usize, String>(0)?,
self.conn.clone(),
))
}
);
let mut videos: Vec<Video<'_>> = Vec::new();
for video in videos_ids {
videos.push(Video::from_hash(
&video.0,
&self.conn
));
}
videos
}
// YT
pub fn get_channel_name_yt(&self, id: &str) -> String {
let db = self.conn.lock().unwrap();
pub async fn get_channel_name_yt(&self, id: &str) -> String {
let name: (String,) =
sqlx::query_as("SELECT uploader_name FROM youtube_meta WHERE uploader_id = $1")
.bind(id)
.fetch_one(&self.conn)
.await
.unwrap();
let res: Vec<String> = query_row_map!(
db,
"SELECT uploader_name FROM youtube_meta WHERE uploader_id = ?1",
&[id],
|x| { x.get(0) }
);
res.first().unwrap().to_owned()
name.0
}
pub fn get_tags_yt(&self) -> Vec<String> {
let db = self.conn.lock().unwrap();
let tags: Vec<_> =
query_row_map!(db, "SELECT DISTINCT tag FROM youtube_meta_tags", [], |x| {
x.get(0)
});
tags
pub async fn get_tags_yt(&self) -> Vec<String> {
let res: Vec<(String,)> = sqlx::query_as("SELECT DISTINCT(tag) FROM youtube_meta_tags")
.fetch_all(&self.conn)
.await
.unwrap();
res.into_iter().map(|x| x.0).collect()
}
pub fn get_videos_by_tag_yt(&self, tag: &str) -> Vec<Video> {
let db = self.conn.lock().unwrap();
pub async fn get_videos_by_tag_yt(&self, tag: &str) -> Vec<Video> {
let videos_ids: Vec<(String, )> = sqlx::query_as(
"SELECT sha256 FROM youtube_meta_tags INNER JOIN youtube_meta ON youtube_meta_tags.youtube_id = youtube_meta.id INNER JOIN videos ON videos.youtube_id = youtube_meta.id WHERE tag = $1;")
.bind(tag)
.fetch_all(&self.conn).await.unwrap();
let mut videos: Vec<Video<'_>> = Vec::new();
let videos: Vec<_> = query_row_map!(
db,
"SELECT sha256 FROM youtube_meta_tags INNER JOIN youtube_meta ON youtube_meta_tags.youtube_id = youtube_meta.id INNER JOIN videos ON videos.youtube_id = youtube_meta.id WHERE tag = ?1;",
&[tag],
|x| {
Ok(Video::from_hash(
&x.get::<usize, String>(0)?,
self.conn.clone(),
))
for video in videos_ids {
videos.push(Video::from_hash(
&video.0,
&self.conn
));
}
);
videos
videos
}
pub fn get_channel_videos_yt(&self, id: &str) -> Vec<Video> {
let db = self.conn.lock().unwrap();
pub async fn get_channel_videos_yt(&self, id: &str) -> Vec<Video> {
let videos_ids: Vec<(String, )> = sqlx::query_as(
"SELECT sha256 FROM youtube_meta INNER JOIN videos ON youtube_meta.id = videos.youtube_id WHERE uploader_id = $1 ORDER BY youtube_meta.upload_date DESC;")
.bind(id)
.fetch_all(&self.conn).await.unwrap();
let videos: Vec<_> = query_row_map!(
db,
"SELECT sha256 FROM youtube_meta INNER JOIN videos ON youtube_meta.id = videos.youtube_id WHERE uploader_id = ?1 ORDER BY youtube_meta.upload_date DESC;",
&[id],
|x| {
Ok(Video::from_hash(
&x.get::<usize, String>(0)?,
self.conn.clone(),
))
let mut videos: Vec<Video<'_>> = Vec::new();
for video in videos_ids {
videos.push(Video::from_hash(
&video.0,
&self.conn
));
}
);
videos
videos
}
// videos
pub fn get_random_videos(&self, n: usize) -> Vec<Video> {
let db = self.conn.lock().unwrap();
pub async fn get_random_videos(&self, n: i64) -> Vec<Video> {
let videos_ids: Vec<(String, )> = sqlx::query_as(
"SELECT sha256 FROM videos ORDER BY RANDOM() LIMIT $1;")
.bind(n)
.fetch_all(&self.conn).await.unwrap();
query_row_map!(
db,
"SELECT sha256 FROM videos ORDER BY RANDOM() LIMIT ?1;",
[n],
|x| {
Ok(Video::from_hash(
&x.get::<usize, String>(0)?,
self.conn.clone(),
))
}
let mut videos: Vec<Video<'_>> = Vec::new();
for video in videos_ids {
videos.push(Video::from_hash(
&video.0,
&self.conn
));
}
videos
}
pub async fn get_video_by_hash(&self, hash: &str) -> Option<(String, Video)> {
let res: Vec<(String, Video)> = sqlx::query_as::<sqlx::Postgres, (String, String)>(
"SELECT sha256, directory FROM videos WHERE sha256 = $1"
)
}
pub fn get_video_by_hash(&self, hash: &str) -> Option<(String, Video)> {
let db = self.conn.lock().unwrap();
let res: Vec<(String, Video)> = query_row_map!(
db,
"SELECT sha256, directory FROM videos WHERE sha256 = ?1;",
&[hash],
|x| {
Ok((
x.get(1)?,
Video::from_hash(&x.get::<usize, String>(0)?, self.conn.clone()),
))
}
);
if !res.is_empty() {
return res.first().map(std::borrow::ToOwned::to_owned);
}
None
}
pub fn get_video_by_youtube_id(&self, id: &str) -> Option<(String, Video)> {
let db = self.conn.lock().unwrap();
let res = query_row_map!(
db,
"SELECT sha256, directory FROM videos WHERE youtube_id = ?1",
&[id],
|x| {
Ok((
x.get(1)?,
Video::from_hash(&x.get::<usize, String>(0)?, self.conn.clone()),
))
}
);
.bind(hash)
.fetch_all(&self.conn).await.unwrap().into_iter()
.map(|x| {
(x.1, Video::from_hash(&x.0, &self.conn))
}).collect();
if !res.is_empty() {
return res.first().map(std::borrow::ToOwned::to_owned);
}
None
}
pub fn has_path(&self, path: &Path) -> bool {
let db = self.conn.lock().unwrap();
let mut state = db
.prepare("SELECT path FROM videos WHERE path = ?1;")
.unwrap();
pub async fn get_video_by_youtube_id(&self, id: &str) -> Option<(String, Video)> {
let res: Vec<(String, Video<'_>)> = sqlx::query_as(
"SELECT sha256, directory FROM videos WHERE youtube_id = $1")
.bind(id)
.fetch_all(&self.conn).await.unwrap().into_iter().map(|x: (String, String)| {
(
x.1,
Video::from_hash(&x.0, &self.conn),
)
}).collect();
let x = state
.query_map([path.to_str().unwrap()], |x| {
let r: String = x.get(0)?;
Ok(r)
})
if !res.is_empty() {
return res.first().map(std::borrow::ToOwned::to_owned);
}
None
}
pub async fn has_path(&self, path: &Path) -> bool {
sqlx::query("SELECT path FROM videos WHERE path = $1")
.bind(path.display().to_string())
.fetch_optional(&self.conn)
.await
.unwrap()
.flatten()
.next()
.is_some();
x
.is_some()
}
// search
pub fn search_video(&self, query: &str, start: usize, n: usize) -> Vec<Video> {
let db = self.conn.lock().unwrap();
pub async fn search_video(&self, query: &str, start: i64, n: i64) -> Vec<Video> {
let query = format!("%{query}%");
query_row_map!(
db,
&format!(
r#"SELECT DISTINCT
sqlx::query_as(
r#"SELECT DISTINCT
vm.sha256,
( -- Calculate a score for the video based on matches
(ym.title LIKE ?1) +
(ym.description LIKE ?1) +
(ym.uploader_name LIKE ?1) +
(vm.directory LIKE ?1)
(ym.title LIKE $1) +
(ym.description LIKE $1) +
(ym.uploader_name LIKE $1) +
(vm.directory LIKE $1)
) AS score
FROM
youtube_meta AS ym
@ -246,41 +202,41 @@ impl Library {
LEFT JOIN
youtube_meta_tags AS ymt ON ym.id = ymt.youtube_id
WHERE
(ym.title LIKE ?1) OR
(ym.description LIKE ?1) OR
(ym.uploader_name LIKE ?1) OR
(vm.directory LIKE ?1) OR
(ymt.tag LIKE ?1)
(ym.title LIKE $1) OR
(ym.description LIKE $1) OR
(ym.uploader_name LIKE $1) OR
(vm.directory LIKE $1) OR
(ymt.tag LIKE $1)
ORDER BY
score DESC,
ym.upload_date DESC LIMIT {n} OFFSET {start};"#
),
&[&query],
|x| {
Ok(Video::from_hash(
&x.get::<usize, String>(0)?,
self.conn.clone(),
))
}
ym.upload_date DESC LIMIT $2 OFFSET $3;"#
)
.bind(query)
.bind(n)
.bind(start)
.fetch_all(&self.conn).await.unwrap().into_iter().map(|x: (String, i64)| {
Video::from_hash(
&x.0,
&self.conn
)
}).collect()
}
}
// video library scan
impl Library {
pub fn scan_dir(&self, dir: &PathBuf) {
pub async fn scan_dir(&self, dir: &PathBuf) {
log::info!("Scanning {dir:?}");
let lib = self.get_video_paths(dir);
let _: Vec<Video> = lib
.into_par_iter()
.map(|x| Video::insert_path_to_db(&self.conn.clone(), &x))
.collect();
let db = self.conn.lock().unwrap();
db.flush_prepared_statement_cache();
let lib = self.get_video_paths(dir).await;
for path in lib {
Video::insert_path_to_db(&self.conn, &path).await;
}
log::info!("Finished scanning {dir:?}");
}
fn get_video_paths(&self, dir: &PathBuf) -> Vec<PathBuf> {
async fn get_video_paths(&self, dir: &PathBuf) -> Vec<PathBuf> {
let mut videos: Vec<PathBuf> = vec![];
for entry in WalkDir::new(dir).follow_links(true) {
@ -292,7 +248,7 @@ impl Library {
if is_video_file(&file_name) {
let video_path = entry.path().to_path_buf();
if self.has_path(&video_path) {
if self.has_path(&video_path).await {
continue;
}

View file

@ -1,13 +1,13 @@
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use serde_json::json;
use crate::library::func::calculate_sha256_hash;
use crate::query_row_map;
use crate::yt_meta;
// todo : optimize
#[derive(Debug, Default, Clone)]
pub struct Video {
pub struct Video<'a> {
directory: Option<String>,
path: Option<PathBuf>,
title: Option<String>,
@ -16,26 +16,23 @@ pub struct Video {
description: Option<String>,
uploader_name: Option<String>,
uploader_id: Option<String>,
duration: Option<usize>,
views: Option<usize>,
duration: Option<i64>,
views: Option<i64>,
categories: Option<Vec<String>>,
tags: Option<Vec<String>>,
upload_date: Option<String>,
db: Option<Arc<Mutex<rusqlite::Connection>>>,
upload_date: Option<chrono::NaiveDate>,
db: Option<&'a sqlx::PgPool>,
}
// Video properties
impl Video {
fn get_video_info(&mut self) {
impl<'a> Video<'a> {
async fn get_video_info(&mut self) {
log::info!("Fetching Video Metadata for {}", self.hash);
let db = self.db.as_mut().unwrap().lock().unwrap();
let res: Vec<(String, String)> = query_row_map!(
db,
"SELECT title, path FROM videos WHERE sha256 = ?1",
&[&self.hash],
|x| { Ok((x.get(0)?, x.get(1)?)) }
);
let res: Vec<(String, String)> = sqlx::query_as(
"SELECT title, path FROM videos WHERE sha256 = $1")
.bind(&self.hash)
.fetch_all(&*self.db.unwrap()).await.unwrap();
let res = res.first().unwrap();
@ -43,19 +40,13 @@ impl Video {
self.path = Some(std::path::Path::new(&res.1).to_path_buf());
}
fn get_youtube_meta_info(&mut self) {
async fn get_youtube_meta_info(&mut self) {
log::info!("Fetching YouTube Metadata for {}", self.hash);
let db = self.db.as_mut().unwrap().lock().unwrap();
let res: Vec<(String, String, String, String, String, usize)> = query_row_map!(
db,
"SELECT id, description, uploader_name, uploader_id, upload_date, views FROM youtube_meta WHERE id = (SELECT youtube_id FROM videos WHERE sha256 = ?1 LIMIT 1)",
&[&self.hash],
|x| { Ok(
( x.get(0)? , x.get(1)?, x.get(2)?, x.get(3)?, x.get(4)?, x.get(5)? )
)
}
);
let res: Vec<(String, String, String, String, chrono::NaiveDate, i64)> = sqlx::query_as(
"SELECT id, description, uploader_name, uploader_id, upload_date, views FROM youtube_meta WHERE id = (SELECT youtube_id FROM videos WHERE sha256 = $1 LIMIT 1)")
.bind(&self.hash)
.fetch_all(&**self.db.as_ref().unwrap()).await.unwrap();
if let Some(res) = res.first() {
self.youtube_id = Some(res.0.clone());
@ -65,92 +56,88 @@ impl Video {
self.upload_date = Some(res.4.clone());
self.views = Some(res.5);
let res: Vec<String> = query_row_map!(
db,
"SELECT category FROM youtube_meta_categories WHERE youtube_id = ?1",
&[self.youtube_id.as_ref().unwrap()],
|x| { x.get(0) }
);
let res: Vec<(String,)> = sqlx::query_as(
"SELECT category FROM youtube_meta_categories WHERE youtube_id = $1")
.bind(self.youtube_id.as_ref().unwrap())
.fetch_all(&**self.db.as_ref().unwrap()).await.unwrap();
self.categories = Some(res);
self.categories = Some(res.into_iter().map(|x| x.0).collect());
let res: Vec<String> = query_row_map!(
db,
"SELECT tag FROM youtube_meta_tags WHERE youtube_id = ?1",
&[self.youtube_id.as_ref().unwrap()],
|x| { x.get(0) }
);
let res: Vec<(String,)> = sqlx::query_as(
"SELECT tag FROM youtube_meta_tags WHERE youtube_id = $1")
.bind(self.youtube_id.as_ref().unwrap())
.fetch_all(&**self.db.as_ref().unwrap()).await.unwrap();
self.tags = Some(res);
self.tags = Some(res.into_iter().map(|x| x.0).collect());
}
}
pub fn title(&mut self) -> Option<&str> {
pub async fn title(&mut self) -> Option<&str> {
if self.title.is_none() {
self.get_video_info();
self.get_video_info().await;
}
self.title.as_deref()
}
pub fn path(&mut self) -> Option<PathBuf> {
pub async fn path(&mut self) -> Option<PathBuf> {
if self.path.is_none() {
self.get_video_info();
self.get_video_info().await;
}
self.path.as_ref().map(std::clone::Clone::clone)
}
pub fn description(&mut self) -> Option<&str> {
pub async fn description(&mut self) -> Option<&str> {
if self.description.is_none() {
self.get_youtube_meta_info();
self.get_youtube_meta_info().await;
}
self.description.as_deref()
}
pub fn views(&mut self) -> Option<usize> {
pub async fn views(&mut self) -> Option<i64> {
if self.views.is_none() {
self.get_youtube_meta_info();
self.get_youtube_meta_info().await;
}
self.views
}
pub fn uploader_name(&mut self) -> Option<&str> {
pub async fn uploader_name(&mut self) -> Option<&str> {
if self.uploader_name.is_none() {
self.get_youtube_meta_info();
self.get_youtube_meta_info().await;
}
self.uploader_name.as_deref()
}
pub fn uploader_id(&mut self) -> Option<&str> {
pub async fn uploader_id(&mut self) -> Option<&str> {
if self.uploader_id.is_none() {
self.get_youtube_meta_info();
self.get_youtube_meta_info().await;
}
self.uploader_id.as_deref()
}
pub fn upload_date(&mut self) -> Option<&str> {
pub async fn upload_date(&mut self) -> Option<chrono::NaiveDate> {
if self.upload_date.is_none() {
self.get_youtube_meta_info();
self.get_youtube_meta_info().await;
}
self.upload_date.as_deref()
self.upload_date
}
pub fn categories(&mut self) -> Option<&Vec<String>> {
pub async fn categories(&mut self) -> Option<&Vec<String>> {
if self.categories.is_none() {
self.get_youtube_meta_info();
self.get_youtube_meta_info().await;
}
self.categories.as_ref()
}
pub fn tags(&mut self) -> Option<&Vec<String>> {
pub async fn tags(&mut self) -> Option<&Vec<String>> {
if self.tags.is_none() {
self.get_youtube_meta_info();
self.get_youtube_meta_info().await;
}
self.tags.as_ref()
}
pub fn youtube_id(&mut self) -> Option<&str> {
pub async fn youtube_id(&mut self) -> Option<&str> {
if self.youtube_id.is_none() {
self.get_youtube_meta_info();
self.get_youtube_meta_info().await;
}
self.youtube_id.as_deref()
}
@ -161,15 +148,16 @@ impl Video {
}
// Video Init
impl Video {
pub fn from_hash(hash: &str, db: Arc<Mutex<rusqlite::Connection>>) -> Self {
impl<'a> Video<'a> {
pub fn from_hash(hash: &str, db: &'a sqlx::PgPool) -> Self {
Self {
hash: hash.to_owned(),
db: Some(db),
..Default::default()
}
}
pub fn insert_path_to_db(db: &Arc<Mutex<rusqlite::Connection>>, v: &PathBuf) -> Self {
pub async fn insert_path_to_db(db: &'a sqlx::PgPool, v: &PathBuf) -> Option<Self> {
log::info!("Add {v:?} to library");
let id = calculate_sha256_hash(v.to_str().unwrap()).unwrap();
let file_name = v.file_stem().unwrap().to_str().unwrap().to_owned();
@ -182,52 +170,44 @@ impl Video {
.unwrap()
.to_owned();
let mut sdb = db.lock().unwrap();
let tx = sdb.transaction().unwrap();
let mut tx = db.begin().await.unwrap();
if let Some(meta) = yt_meta::get_youtube_metadata(v) {
tx.execute(
"INSERT INTO videos (sha256, directory, path, title, youtube_id) VALUES (?1, ?2, ?3, ?4, ?5)",
[
&id,
&dir,
v.to_str().unwrap(),
&meta.title(),
&meta.youtube_id().unwrap()
]).unwrap();
sqlx::query("INSERT INTO youtube_meta (id, title, description, uploader_name, uploader_id, duration, views, upload_date) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)")
.bind(&meta.youtube_id().unwrap())
.bind(&meta.title())
.bind(&meta.description().unwrap())
.bind(&meta.uploader_name().unwrap())
.bind(&meta.uploader_id().unwrap())
.bind(&meta.duration().unwrap())
.bind(&meta.views().unwrap())
.bind(&meta.upload_date())
.execute(&mut *tx).await.unwrap();
let _ = tx.execute(
"INSERT INTO youtube_meta (id, title, description, uploader_name, uploader_id, duration, views, upload_date) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8)",
[
&meta.youtube_id().unwrap(),
&meta.title(),
&meta.description().unwrap(),
&meta.uploader_name().unwrap(),
&meta.uploader_id().unwrap(),
&meta.duration().unwrap().to_string(),
&meta.views().unwrap().to_string(),
&meta.upload_date().unwrap()
]);
sqlx::query("INSERT INTO videos (sha256, directory, path, title, youtube_id) VALUES ($1, $2, $3, $4, $5)")
.bind(&id)
.bind(&dir)
.bind(v.to_str().unwrap())
.bind(meta.title())
.bind(meta.youtube_id().unwrap())
.execute(&mut *tx).await.unwrap();
for cat in meta.categories().unwrap() {
let _ = tx.execute(
"INSERT INTO youtube_meta_categories (youtube_id, category) VALUES (?1, ?2)",
[&meta.youtube_id().unwrap(), &cat],
);
sqlx::query("INSERT INTO youtube_meta_categories (youtube_id, category) VALUES ($1, $2)")
.bind(meta.youtube_id().unwrap()).bind(cat).execute(&mut *tx).await.unwrap();
}
if let Some(tags) = meta.tags() {
for tag in tags {
let _ = tx.execute(
"INSERT INTO youtube_meta_tags (youtube_id, tag) VALUES (?1, ?2)",
[&meta.youtube_id().unwrap(), &tag],
);
sqlx::query(
"INSERT INTO youtube_meta_tags (youtube_id, tag) VALUES ($1, $2)")
.bind(&meta.youtube_id().unwrap()).bind(&tag).execute(&mut *tx).await.unwrap();
}
}
tx.commit().unwrap();
tx.commit().await.unwrap();
return Self {
return Some(Self {
directory: Some(dir),
path: Some(v.to_owned()),
title: Some(meta.title()),
@ -241,24 +221,43 @@ impl Video {
categories: meta.categories(),
tags: meta.tags(),
upload_date: meta.upload_date(),
db: Some(db.clone()),
};
db: Some(db),
});
}
tx.execute(
"INSERT OR REPLACE INTO videos (sha256, directory, path, title) VALUES (?1, ?2, ?3, ?4)",
[&id, &dir, v.to_str().unwrap(), &file_name],
)
.unwrap();
sqlx::query(
"INSERT INTO videos (sha256, directory, path, title) VALUES ($1, $2, $3, $4)")
.bind(&id).bind(dir).bind(v.to_str().unwrap()).bind(file_name).execute(&mut *tx).await.unwrap();
tx.commit().unwrap();
tx.commit().await.unwrap();
Self {
Some(Self {
path: Some(v.to_owned()),
title: Some(v.file_stem().unwrap().to_str().unwrap().to_owned()),
hash: id,
db: Some(db.clone()),
db: Some(db),
..Self::default()
}
})
}
}
impl Video<'_> {
pub async fn api(&mut self) -> serde_json::Value {
self.get_video_info().await;
self.get_youtube_meta_info().await;
json!({
"directory": self.directory.as_ref().unwrap(),
"title": self.title.as_ref().unwrap(),
"hash": self.hash,
"youtube_id": self.youtube_id,
"uploader_name": self.uploader_name,
"uploader_id": self.uploader_id,
"duration": self.duration,
"views": self.views,
"categories": self.categories,
"tags": self.tags,
"upload_date": self.upload_date,
})
}
}

View file

@ -1,13 +1,18 @@
use std::path::Path;
use rocket::{http::Method, routes};
mod library;
mod pages;
mod yt_meta;
// TODO : Add User Auth DB
#[actix_web::main]
async fn main() -> std::io::Result<()> {
// TODO : Rework into Video Server Backend
// -> API
#[rocket::launch]
async fn launch() -> _ {
std::env::set_var("RUST_LOG", "info");
std::env::set_var("RUST_BACKTRACE", "1");
env_logger::init();
@ -21,49 +26,35 @@ async fn main() -> std::io::Result<()> {
let dir_path = args[1].clone();
let lib = library::Library::new();
let library = actix_web::web::Data::new(lib);
let lib = library::Library::new().await;
let lib = library.clone();
let library = lib.clone();
std::thread::spawn(move || {
lib.scan_dir(&Path::new(&dir_path.clone()).to_path_buf());
});
library.scan_dir(&Path::new(&dir_path.clone()).to_path_buf()).await;
web_base::map!(
web_base::Site::new()
.enable_picocss(true)
.enable_htmx(true)
.enable_favicon("/icon".to_owned())
.add_manifest(
web_base::Manifest::new("WatchDogs")
.set_short_name("WatchDogs")
.set_background_color("rgb(1, 4, 29)")
.set_theme_color("#e53935")
.add_icon("/icon", "1024x1024", "image/png")
.set_start_url("/")
.set_display(web_base::ManifestDisplay::MinimalUI)
.set_description("watch & share videos")
),
|app: actix_web::App<_>| {
// Base
app.app_data(library.clone())
.service(pages::index::index)
.service(pages::index::channel_page)
.service(pages::index::search)
// Videos
.service(pages::video::video_page)
// Assets
.service(pages::assets::icon_resource)
.service(pages::assets::video_file)
.service(pages::assets::video_thumbnail)
// YT Pages
.service(pages::yt::yt_channel_page)
.service(pages::yt::yt_tags)
.service(pages::yt::yt_tag_page)
}
)
.bind(("0.0.0.0".to_string(), 8080))?
.run()
.await
let cors = rocket_cors::CorsOptions {
allowed_origins: rocket_cors::AllowedOrigins::all(),
allowed_methods: vec![Method::Get, Method::Post, Method::Options]
.into_iter()
.map(From::from)
.collect(),
allowed_headers: rocket_cors::AllowedHeaders::all(),
allow_credentials: true,
..Default::default()
}
.to_cors()
.expect("error creating CORS options");
rocket::build()
.mount("/", routes![
pages::assets::video_file,
pages::assets::video_thumbnail,
pages::index::search,
pages::index::channel_page,
pages::yt::yt_tags,
pages::yt::yt_tag_page,
pages::yt::yt_channel_page
])
.attach(cors)
.manage(lib)
}

View file

@ -1,45 +1,31 @@
use actix_web::{get, HttpRequest, Responder};
use rocket::{fs::NamedFile, get, State};
#[get("/icon")]
pub async fn icon_resource(_r: HttpRequest) -> impl Responder {
web_base::send_data(
include_bytes!("../icon.png").to_vec(),
"image/png",
"icon.png",
)
}
use crate::library::Library;
#[get("/video/raw")]
pub async fn video_file(r: HttpRequest) -> Option<impl Responder> {
let library: &actix_web::web::Data<crate::library::Library> = r.app_data().unwrap();
let query = web_base::parse_query_string(r.query_string());
if let Some(video_id) = query.get("v") {
let (_, mut video) = if let Some((channel, video)) = library.get_video_by_hash(video_id) {
#[get("/video/raw?<v>")]
pub async fn video_file(v: &str, library: &State<Library>) -> Option<NamedFile> {
let (_, mut video) = if let Some((channel, video)) = library.get_video_by_hash(v).await {
(channel, video)
} else {
library.get_video_by_youtube_id(video_id).unwrap()
library.get_video_by_youtube_id(v).await.unwrap()
};
return Some(actix_files::NamedFile::open(video.path().unwrap()).unwrap());
}
None
NamedFile::open(video.path().await?).await.ok()
}
#[get("/video/thumbnail")]
pub async fn video_thumbnail(r: HttpRequest) -> Option<impl Responder> {
let library: &actix_web::web::Data<crate::library::Library> = r.app_data().unwrap();
let query = web_base::parse_query_string(r.query_string());
if let Some(video_id) = query.get("v") {
let (_, mut video) = if let Some((channel, video)) = library.get_video_by_hash(video_id) {
#[get("/video/thumbnail?<v>")]
pub async fn video_thumbnail(v: &str, library: &State<Library>) -> Option<NamedFile> {
let (_, mut video) = if let Some((channel, video)) = library.get_video_by_hash(v).await {
(channel, video)
} else {
library.get_video_by_youtube_id(video_id).unwrap()
library.get_video_by_youtube_id(v).await.unwrap()
};
let path = video.path().unwrap();
let path = video.path().await.unwrap();
let parent = path.parent().unwrap();
let thumbnail_path = path.file_stem().unwrap().to_str().unwrap();
let thumbnail_path = parent.join(thumbnail_path);
let thumbnail_path = thumbnail_path.to_str().unwrap();
return Some(actix_files::NamedFile::open(format!("{thumbnail_path}.jpg")).unwrap());
}
None
NamedFile::open(format!("{thumbnail_path}.jpg")).await.ok()
}

View file

@ -1,88 +0,0 @@
use actix_web::HttpResponse;
use maud::{html, PreEscaped};
use crate::library::Video;
/// Renders a full-viewport animated loading spinner as an HTML fragment.
///
/// Used as a placeholder element in htmx-driven lists (e.g. paged search
/// results) while the next chunk of content is being fetched.
pub fn loading_spinner() -> PreEscaped<String> {
html! {
// Inline CSS: flexbox centering for the container plus the rotation
// keyframes; the whole style block is one string literal.
style {
".spinner { display: flex;justify-content: center;align-items: center;height: 100vh;}
.spinner-border { border: 2px solid #007bff;border-top: 2px solid transparent;border-radius: 50%;width: 40px;height: 40px;animation: spin 1s linear infinite;}
@keyframes spin {0% { transform: rotate(0deg); }100% { transform: rotate(360deg); }}"
};
div class="spinner" {
div class="spinner-border" {};
};
}
}
/// Renders the site search form, pre-filled with the current `query`.
///
/// Submits via htmx (`hx-get` into `#main-view`, pushing the URL) and also
/// carries a plain `action="/search"` so it degrades to a normal GET
/// without JavaScript.
pub fn search_bar(query: &str) -> PreEscaped<String> {
html! {
form hx-get="/search" action="/search" hx-push-url="true" hx-target="#main-view" hx-swap="innerHTML" {
input style="width: 100%;" value=(query) name="query" type="search" placeholder="Search...";
};
}
}
/// Renders one video preview card (thumbnail, title, truncated description)
/// linking to the watch page for that video.
///
/// Takes `&mut Video` because the `description()`/`title()` accessors lazily
/// fetch metadata from the database on first use.
pub fn video_element(video: &mut Video) -> PreEscaped<String> {
html!(
// Description may be missing; fall back to an empty string.
@let desc = video.description().unwrap_or_default().to_owned();
@let video_hash = video.hash();
article class="container-fluid" style="margin: 50px; cursor: pointer;" {
// The whole card is a link keyed by the video's hash.
a href=(format!("/watch?v={video_hash}")) style="text-decoration:none !important;" {
img style="width: 350px;" width="480" src=(format!("/video/thumbnail?v={video_hash}"));
div style="padding: 10px;" {
// NOTE(review): title() is unwrapped — panics if no title row exists; confirm invariant.
h2 style="margin: 0; font-size: 18px;" { (video.title().unwrap()) };
@if !desc.is_empty() {
// Truncate the description to 200 chars and append "..." (203 total).
p style="margin: 0; color: grey; font-size: 14px;margin-top: 10px;" { (desc.chars().take(200).chain("...".to_string().chars()).take(203).collect::<String>()) };
};
};
};
};
)
}
/// Renders the page header: logo + site name linking home on the left,
/// and the search bar (pre-filled with `query`) filling the remaining width.
pub fn header(query: &str) -> PreEscaped<String> {
html!(
header style="padding: 10px 0; display: flex; justify-content: space-between;" {
// Logo and wordmark link back to the index page.
a href="/" style="text-decoration: none; margin-left: 20px;" {
div style="margin-right: 20px;display:flex;align-items: center" {
img src="/icon" width="64" style="margin-top: -25px;margin-right: 15px;border-radius: 20%;";
p style="font-size: 42px;" { "WatchDogs" };
};
};
// Fixed-width spacer between the logo and the search bar.
div style="width: 35px;" {};
div style="flex-grow: 1; text-align: center;" {
(search_bar(query));
};
};
)
}
/// Builds a complete page for `content` with no active search query.
/// Thin wrapper over `build_site_fn` passing `None` for the query.
pub fn build_site(r: &actix_web::HttpRequest, title: &str, content: &str) -> HttpResponse<String> {
build_site_fn(r, title, content, None)
}
/// Assembles a full HTML page: dark-theme body with inline CSS overrides,
/// the site header (search bar seeded with `query`, if any), and the raw
/// `content` fragment, then delegates final page construction to `web_base`.
///
/// `content` is injected with `PreEscaped`, i.e. it is trusted HTML and is
/// NOT escaped here.
pub fn build_site_fn(
r: &actix_web::HttpRequest,
title: &str,
content: &str,
query: Option<String>,
) -> HttpResponse<String> {
let content = html! {
body class="container" data-theme="dark" id="main-view" {
// Theme overrides: dark background and red primary color.
style {
(r#"
:root:not([data-theme]) {
--background-color: rgb(1, 4, 29);
--primary: #e53935;
}
"#)
};
(header(&query.unwrap_or_default()))
(PreEscaped(content))
};
}
.into_string();
web_base::func::build_site_from_body(&web_base::Site::from_request(r), title, &content)
}

View file

@ -1,115 +1,25 @@
use actix_web::{get, HttpRequest, Responder};
use maud::html;
use rocket::{get, State};
use serde_json::json;
use crate::pages::components::loading_spinner;
use crate::library::Library;
use super::components::{build_site, build_site_fn, video_element};
use super::vec_to_api_video_mut;
#[get("/search")]
pub async fn search(r: HttpRequest) -> impl Responder {
let library: &actix_web::web::Data<crate::library::Library> = r.app_data().unwrap();
const NUM_OF_RESULTS: usize = 20;
if let Some(query) = web_base::parse_query_string(r.query_string()).get("query") {
#[get("/search?<query>&<offset>")]
pub async fn search(query: &str, offset: Option<i64>, library: &State<Library>) -> Option<serde_json::Value> {
const NUM_OF_RESULTS: i64 = 20;
// get start parameter for search result chunks
let start = web_base::parse_query_string(r.query_string())
.get("offset")
.map(|x| x.parse::<usize>().ok());
let start = if let Some(Some(start)) = start {
start
} else {
0
};
let mut video_matches = library.search_video(query, start, NUM_OF_RESULTS);
let start = offset.unwrap_or(0);
let mut video_matches = library.search_video(query, start, NUM_OF_RESULTS).await;
// return raw html if request is from htmx
if web_base::is_htmx_request(&r) && start != 0 {
let content = if video_matches.is_empty() {
// return end of search results
String::new()
} else {
html!(
@for vid_match in &mut video_matches {
(video_element(vid_match));
};
// request more results
div hx-get=(format!("/search?query={query}&offset={}", start+NUM_OF_RESULTS)) hx-trigger="intersect" hx-swap="outerHTML" {
( loading_spinner() )
};
)
.into_string()
};
return actix_web::HttpResponse::Ok().message_body(content).unwrap();
}
// search page
let content = html!(
p { "You searched for " code { (query) }; };
hr;
@for vid_match in &mut video_matches {
(video_element(vid_match));
};
div hx-get=(format!("/search?query={query}&offset={}", start+NUM_OF_RESULTS)) hx-trigger="load" hx-swap="outerHTML" {
( loading_spinner() )
};
)
.into_string();
build_site_fn(&r, "Search", &content, Some(query.to_owned()))
} else {
web_base::redirect("/")
}
Some(json!(vec_to_api_video_mut(&mut video_matches).await))
}
#[get("/d/{dir}")]
pub async fn channel_page(r: HttpRequest, p: actix_web::web::Path<String>) -> impl Responder {
let library: &actix_web::web::Data<crate::library::Library> = r.app_data().unwrap();
let dir_name = p.into_inner();
#[get("/d/<dir>")]
pub async fn channel_page(dir: &str, library: &State<Library>) -> Option<serde_json::Value> {
let mut dir_videos = library.get_directory_videos(dir).await;
let mut dir_videos = library.get_directory_videos(&dir_name);
let video_grids: Vec<_> = dir_videos
.chunks_mut(3)
.map(|x| {
html!(
div class="grid" {
@for video in x {
(video_element(video));
};
};
)
})
.collect();
let content = html!(
h1 { (format!("{dir_name} ({})", dir_videos.len())) };
@for grid in video_grids {
( grid )
};
)
.into_string();
build_site(&r, &dir_name, &content)
}
#[get("/")]
pub(crate) async fn index(r: HttpRequest) -> impl Responder {
let library: &actix_web::web::Data<crate::library::Library> = r.app_data().unwrap();
let dirs = library.get_directories();
println!("{:?}", r.headers());
let content = html!(
h1 style="margin-bottom: -35px;" { "Random Videos" };
div class="grid" style="margin-left: -80px;" {
@for mut vid in library.get_random_videos(3) {
( video_element(&mut vid) );
};
};
h1 { "Directories:" };
@for dir in dirs {
a href=(format!("/d/{dir}")) { (dir) };
br;
};
)
.into_string();
build_site(&r, "WatchDogs", &content)
Some(json!(vec_to_api_video_mut(&mut dir_videos).await))
}

View file

@@ -1,5 +1,30 @@
pub mod assets;
pub mod components;
pub mod index;
pub mod video;
pub mod yt;
/// A trait to generate a Model API representation in JSON format.
pub trait ToAPI: Sized {
    /// Generate public API JSON
    ///
    /// Returned as a future so implementors can do async work
    /// (e.g. database lookups) while building the representation.
    fn api(&self) -> impl std::future::Future<Output = serde_json::Value>;
}
/// Converts a slice of items implementing the `ToAPI` trait into a `Vec` of JSON values.
///
/// Each element's [`ToAPI::api`] future is awaited sequentially, and the
/// resulting JSON values are returned in the same order as the input slice.
pub async fn vec_to_api(items: &[impl ToAPI]) -> Vec<serde_json::Value> {
    let mut rendered = Vec::with_capacity(items.len());
    for item in items.iter() {
        let value = item.api().await;
        rendered.push(value);
    }
    rendered
}
/// Converts a mutable slice of library `Video`s into a `Vec` of JSON API values.
///
/// Mirrors [`vec_to_api`] for `Video`.
/// NOTE(review): takes `&mut` even though only `.api().await` is called here —
/// presumably `Video::api` requires exclusive access; confirm against its impl.
pub async fn vec_to_api_video_mut(items: &mut [crate::library::Video<'_>]) -> Vec<serde_json::Value> {
    let mut rendered = Vec::with_capacity(items.len());
    for video in items.iter_mut() {
        rendered.push(video.api().await);
    }
    rendered
}

View file

@@ -1,84 +0,0 @@
use actix_web::{get, HttpRequest, Responder};
use maud::html;
use super::components::build_site;
#[get("/watch")]
pub async fn video_page(r: HttpRequest) -> impl Responder {
let library: &actix_web::web::Data<crate::library::Library> = r.app_data().unwrap();
let query = web_base::parse_query_string(r.query_string());
if let Some(video_id) = query.get("v") {
let (channel, mut video) =
if let Some((channel, video)) = library.get_video_by_hash(video_id) {
(channel, video)
} else {
library.get_video_by_youtube_id(video_id).unwrap()
};
let uploader_id = video.uploader_id().map(std::borrow::ToOwned::to_owned);
let content = html! {
div style="position: relative; padding-bottom: 56.25%; overflow: hidden; max-width: 100%;" {
video style="margin-bottom: 50px;position: absolute; top: 0; left: 0; width: 100%; height: 100%;" height="420" controls autoplay src=(format!("/video/raw?v={video_id}")) {
source src=(format!("/video/raw?v={video_id}")) type="video/x-matroska";
};
};
h2 style="margin-bottom: 10px;margin-top: 10px;" { ( &video.title().unwrap() ) }
hr;
p { "In: " a href=(format!("/d/{channel}")) { (channel)};};
@if video.youtube_id().is_some() {
article class="container-fluid" id="youtube" {
@if let Some(id) = &video.youtube_id() {
a href=(format!("https://youtube.com/watch?v={id}")) { "Watch on YouTube" };
};
p {
"Uploaded"
@if let Some(uploader) = &video.uploader_name() {
" by " a href=(format!("/yt/c/{}", uploader_id.unwrap_or_default())) style="font-weight: bold;" { (uploader) };
}
@if let Some(upload_date) = &video.upload_date() {
" on " span style="font-weight: bold;" { (
chrono::NaiveDate::parse_from_str(upload_date, "%Y%m%d").unwrap().format("%d.%m.%Y")
) };
}
};
@if let Some(views) = video.views() {
p { "Views: " (views) };
};
@if let Some(cats) = video.categories() {
@if !cats.is_empty() {
p {
"Categories: "
@for cat in cats {
code { (cat) };
};
};
};
};
@if let Some(tags) = video.tags() {
@if !tags.is_empty() {
p {
"Tags: "
@for tag in tags {
a href=(format!("/yt/tag/{tag}")) { code style="margin: 2px;" { (tag) }; }; " "
};
};
};
};
@if let Some(desc) = video.description() {
details open {
summary { b { ("Description:" )}};
@for line in desc.lines() {
(line);
br;
};
};
};
};
};
}
.into_string();
return build_site(&r, video.title().unwrap(), &content);
}
build_site(&r, "Video not found", "<p> Video not found </p>")
}

View file

@@ -1,84 +1,23 @@
use actix_web::{get, HttpRequest, Responder};
use maud::html;
use rocket::{get, State};
use serde_json::json;
use super::components::{build_site, video_element};
use crate::{library::Library, pages::vec_to_api_video_mut};
#[get("/yt/tags")]
pub async fn yt_tags(r: HttpRequest) -> impl Responder {
let library: &actix_web::web::Data<crate::library::Library> = r.app_data().unwrap();
let tags = library.get_tags_yt();
let content = html!(
h1 { (format!("Tags ({}): ", tags.len())) };
@for tag in tags {
a href=(format!("/yt/tag/{tag}")) {
(tag)
};
br;
};
)
.into_string();
build_site(&r, "Tags", &content)
/// Handler body: returns every known YouTube tag from the library as a JSON array.
pub async fn yt_tags(library: &State<Library>) -> serde_json::Value {
    json!(library.get_tags_yt().await)
}
#[get("/yt/tag/{tag}")]
pub async fn yt_tag_page(r: HttpRequest, p: actix_web::web::Path<String>) -> impl Responder {
let library: &actix_web::web::Data<crate::library::Library> = r.app_data().unwrap();
let tag = p.into_inner();
#[get("/yt/tag/<tag>")]
pub async fn yt_tag_page(tag: &str, library: &State<Library>) -> serde_json::Value {
let mut channel_videos = library.get_videos_by_tag_yt(&tag).await;
let mut channel_videos = library.get_videos_by_tag_yt(&tag);
let video_grids: Vec<_> = channel_videos
.chunks_mut(3)
.map(|x| {
html!(
div class="grid" {
@for video in x {
(video_element(video));
};
};
)
})
.collect();
let content = html!(
h1 { (tag) };
@for grid in video_grids {
( grid )
};
)
.into_string();
build_site(&r, &format!("#{tag}"), &content)
json!(vec_to_api_video_mut(&mut channel_videos).await)
}
#[get("/yt/c/{channel}")]
pub async fn yt_channel_page(r: HttpRequest, p: actix_web::web::Path<String>) -> impl Responder {
let library: &actix_web::web::Data<crate::library::Library> = r.app_data().unwrap();
let channel_id = p.into_inner();
let mut channel_videos = library.get_channel_videos_yt(&channel_id);
let channel_name = library.get_channel_name_yt(&channel_id);
let video_grids: Vec<_> = channel_videos
.chunks_mut(3)
.map(|x| {
html!(
div class="grid" {
@for video in x {
(video_element(video));
};
};
)
})
.collect();
let content = html!(
h1 { (channel_name) };
@for grid in video_grids {
( grid )
};
)
.into_string();
build_site(&r, &channel_name, &content)
/// GET handler: all videos of the given YouTube channel id, rendered as JSON.
#[get("/yt/c/<channel>")]
pub async fn yt_channel_page(channel: &str, library: &State<Library>) -> serde_json::Value {
    // `channel` is already a `&str`; pass it directly. The previous `&channel`
    // produced a needless `&&str` re-borrow (clippy `needless_borrow`).
    let mut channel_videos = library.get_channel_videos_yt(channel).await;
    json!(vec_to_api_video_mut(&mut channel_videos).await)
}

View file

@@ -81,24 +81,24 @@ impl YouTubeMeta
.map(std::borrow::ToOwned::to_owned)
}
pub fn duration(&self) -> Option<usize> {
pub fn duration(&self) -> Option<i64> {
self.inner
.as_object()
.unwrap()
.get("duration")
.unwrap()
.as_i64()
.map(|x| (x as usize).to_owned())
.map(|x| x.to_owned())
}
pub fn views(&self) -> Option<usize> {
pub fn views(&self) -> Option<i64> {
self.inner
.as_object()
.unwrap()
.get("view_count")
.unwrap()
.as_i64()
.map(|x| (x as usize).to_owned())
.map(|x| x.to_owned())
}
pub fn categories(&self) -> Option<Vec<String>> {
@@ -106,7 +106,7 @@ impl YouTubeMeta
.as_object()
.unwrap()
.get("categories")
.unwrap()
.unwrap_or(&serde_json::json!(Vec::<&str>::new()))
.as_array()
.unwrap()
.iter()
@@ -126,13 +126,13 @@ impl YouTubeMeta
None
}
pub fn upload_date(&self) -> Option<String> {
pub fn upload_date(&self) -> Option<chrono::NaiveDate> {
self.inner
.as_object()
.unwrap()
.get("upload_date")
.unwrap()
.as_str()
.map(std::borrow::ToOwned::to_owned)
.map(|x| chrono::NaiveDate::parse_from_str(x, "%Y%m%d").unwrap())
}
}