update

parent 1979fc246e
commit 224bdf77ab

9 changed files with 207 additions and 367 deletions
@@ -25,12 +25,11 @@ CREATE TABLE IF NOT EXISTS "youtube_meta_categories" (
 );
 
 CREATE TABLE IF NOT EXISTS "videos" (
-    "sha256" TEXT NOT NULL,
+    "id" UUID NOT NULL PRIMARY KEY DEFAULT gen_random_uuid(),
     "directory" TEXT NOT NULL,
     "path" TEXT NOT NULL,
     "duration" INTEGER,
     "title" TEXT,
     "youtube_id" TEXT,
-    PRIMARY KEY("sha256"),
     FOREIGN KEY("youtube_id") REFERENCES "youtube_meta"("id")
 );
 
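Note: the new "id" column relies on PostgreSQL's built-in gen_random_uuid() (core since PostgreSQL 13; older servers need the pgcrypto extension). Since callers no longer supply the key, an insert can ask the database to hand the generated value back with RETURNING. A minimal sketch of that pattern; the helper name is illustrative and sqlx's "uuid" feature is assumed:

    use sqlx::PgPool;
    use uuid::Uuid;

    // Hypothetical helper: insert a row and let Postgres generate the primary key.
    async fn insert_video(pool: &PgPool, dir: &str, path: &str, title: &str) -> sqlx::Result<Uuid> {
        // RETURNING hands the DEFAULT-generated id back to the caller.
        let (id,): (Uuid,) = sqlx::query_as(
            "INSERT INTO videos (directory, path, title) VALUES ($1, $2, $3) RETURNING id",
        )
        .bind(dir)
        .bind(path)
        .bind(title)
        .fetch_one(pool)
        .await?;
        Ok(id)
    }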
@@ -1,6 +1,3 @@
-use ring::digest::{Context, SHA256};
-use std::io::Read;
-
 pub fn is_video_file(filename: &str) -> bool {
     let video_extensions = vec!["mp4", "mkv", "webm"];
 
@@ -11,21 +8,3 @@ pub fn is_video_file(filename: &str) -> bool {
     }
     false
 }
-
-pub fn calculate_sha256_hash(file_path: &str) -> std::io::Result<String> {
-    log::info!("Hashing {file_path}");
-    let mut context = Context::new(&SHA256);
-    let mut file = std::fs::File::open(file_path)?;
-
-    let mut buffer = [0u8; 4096];
-    loop {
-        let n = file.read(&mut buffer)?;
-        if n == 0 {
-            break;
-        }
-        context.update(&buffer[..n]);
-    }
-
-    let digest = context.finish();
-    Ok(hex::encode(digest.as_ref()))
-}
@@ -42,21 +42,11 @@ impl Library {
     }
 
     pub async fn get_directory_videos(&self, dir: &str) -> Vec<Video> {
-        let videos_ids: Vec<(String, )> = sqlx::query_as(
-            "SELECT sha256 FROM videos INNER JOIN youtube_meta ON youtube_meta.id = videos.youtube_id WHERE directory = ?1 ORDER BY youtube_meta.upload_date DESC;")
+        sqlx::query_as("SELECT * FROM videos WHERE directory = ?1")
             .bind(dir)
-            .fetch_all(&self.conn).await.unwrap();
-
-        let mut videos: Vec<Video<'_>> = Vec::new();
-
-        for video in videos_ids {
-            videos.push(Video::from_hash(
-                &video.0,
-                &self.conn
-            ));
-        }
-
-        videos
+            .fetch_all(&self.conn)
+            .await
+            .unwrap()
     }
 
     // YT
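Note: these accessors can now return Vec<Video> straight from sqlx::query_as because, as the video.rs part of this commit shows, Video derives sqlx's FromRow, so each result row is mapped onto the struct by column name. A minimal sketch of that mechanism on a stripped-down, illustrative struct (sqlx's "uuid" feature assumed):

    use sqlx::{FromRow, PgPool};

    // Columns returned by the query are matched to fields by name.
    #[derive(Debug, FromRow)]
    struct VideoRow {
        id: uuid::Uuid,
        directory: String,
        title: String,
    }

    async fn videos_in(pool: &PgPool, dir: &str) -> sqlx::Result<Vec<VideoRow>> {
        sqlx::query_as::<_, VideoRow>("SELECT id, directory, title FROM videos WHERE directory = $1")
            .bind(dir)
            .fetch_all(pool)
            .await
    }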
@@ -81,95 +71,43 @@ videos.push(Video::from_hash(
     }
 
     pub async fn get_videos_by_tag_yt(&self, tag: &str) -> Vec<Video> {
-        let videos_ids: Vec<(String, )> = sqlx::query_as(
-            "SELECT sha256 FROM youtube_meta_tags INNER JOIN youtube_meta ON youtube_meta_tags.youtube_id = youtube_meta.id INNER JOIN videos ON videos.youtube_id = youtube_meta.id WHERE tag = $1;")
+        sqlx::query_as(
+            "SELECT * FROM youtube_meta_tags INNER JOIN youtube_meta ON youtube_meta_tags.youtube_id = youtube_meta.id INNER JOIN videos ON videos.youtube_id = youtube_meta.id WHERE tag = $1")
             .bind(tag)
-            .fetch_all(&self.conn).await.unwrap();
-
-        let mut videos: Vec<Video<'_>> = Vec::new();
-
-        for video in videos_ids {
-            videos.push(Video::from_hash(
-                &video.0,
-                &self.conn
-            ));
-        }
-
-        videos
+            .fetch_all(&self.conn).await.unwrap()
     }
 
     pub async fn get_channel_videos_yt(&self, id: &str) -> Vec<Video> {
-        let videos_ids: Vec<(String, )> = sqlx::query_as(
-            "SELECT sha256 FROM youtube_meta INNER JOIN videos ON youtube_meta.id = videos.youtube_id WHERE uploader_id = $1 ORDER BY youtube_meta.upload_date DESC;")
+        sqlx::query_as(
+            "SELECT * FROM youtube_meta INNER JOIN videos ON youtube_meta.id = videos.youtube_id WHERE uploader_id = $1 ORDER BY youtube_meta.upload_date DESC;")
             .bind(id)
-            .fetch_all(&self.conn).await.unwrap();
-
-        let mut videos: Vec<Video<'_>> = Vec::new();
-
-        for video in videos_ids {
-            videos.push(Video::from_hash(
-                &video.0,
-                &self.conn
-            ));
-        }
-
-        videos
+            .fetch_all(&self.conn).await.unwrap()
     }
 
     // videos
 
     pub async fn get_random_videos(&self, n: i64) -> Vec<Video> {
-        let videos_ids: Vec<(String, )> = sqlx::query_as(
-            "SELECT sha256 FROM videos ORDER BY RANDOM() LIMIT $1;")
+        sqlx::query_as("SELECT * FROM videos ORDER BY RANDOM() LIMIT $1;")
             .bind(n)
-            .fetch_all(&self.conn).await.unwrap();
-
-        let mut videos: Vec<Video<'_>> = Vec::new();
-
-        for video in videos_ids {
-            videos.push(Video::from_hash(
-                &video.0,
-                &self.conn
-            ));
-        }
-
-        videos
+            .fetch_all(&self.conn)
+            .await
+            .unwrap()
     }
 
-    pub async fn get_video_by_hash(&self, hash: &str) -> Option<(String, Video)> {
-        let res: Vec<(String, Video)> = sqlx::query_as::<sqlx::Postgres, (String, String)>(
-            "SELECT sha256, directory FROM videos WHERE sha256 = $1"
-        )
-        .bind(hash)
-        .fetch_all(&self.conn).await.unwrap().into_iter()
-        .map(|x| {
-            (x.1, Video::from_hash(&x.0, &self.conn))
-        }).collect();
-
-        if !res.is_empty() {
-            return res.first().map(std::borrow::ToOwned::to_owned);
-        }
-
-        None
-    }
-
-    pub async fn get_video_by_youtube_id(&self, id: &str) -> Option<(String, Video)> {
-        let res: Vec<(String, Video<'_>)> = sqlx::query_as(
-            "SELECT sha256, directory FROM videos WHERE youtube_id = $1")
-        .bind(id)
-        .fetch_all(&self.conn).await.unwrap().into_iter().map(|x: (String, String)| {
-            (
-                x.1,
-                Video::from_hash(&x.0, &self.conn),
-            )
-        }).collect();
-
-        if !res.is_empty() {
-            return res.first().map(std::borrow::ToOwned::to_owned);
-        }
-
-        None
+    pub async fn get_video_by_id(&self, id: &str) -> Option<Video> {
+        sqlx::query_as("SELECT * FROM videos WHERE id = $1")
+            .bind(id)
+            .fetch_optional(&self.conn)
+            .await
+            .unwrap()
+    }
+
+    pub async fn get_video_by_youtube_id(&self, id: &str) -> Option<Video> {
+        sqlx::query_as("SELECT * FROM videos WHERE youtube_id = $1")
+            .bind(id)
+            .fetch_optional(&self.conn)
+            .await
+            .unwrap()
     }
 
     pub async fn has_path(&self, path: &Path) -> bool {
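Note: fetch_optional is what lets the two lookup methods drop the old fetch-all-then-take-first dance; it returns Ok(None) when no row matches and Ok(Some(row)) otherwise. A standalone sketch of the same call shape, assuming a pool and the "videos" table from the migration above (the helper name is illustrative):

    use sqlx::PgPool;

    async fn find_by_youtube_id(pool: &PgPool, yt: &str) -> Option<(uuid::Uuid, String)> {
        // One row or nothing; errors are collapsed into None here purely for brevity.
        sqlx::query_as("SELECT id, path FROM videos WHERE youtube_id = $1")
            .bind(yt)
            .fetch_optional(pool)
            .await
            .ok()
            .flatten()
    }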
@@ -186,9 +124,9 @@ videos.push(Video::from_hash(
     pub async fn search_video(&self, query: &str, start: i64, n: i64) -> Vec<Video> {
         let query = format!("%{query}%");
 
-        sqlx::query_as(
+        let ids: Vec<(uuid::Uuid,)> = sqlx::query_as(
             r#"SELECT DISTINCT
-                vm.sha256,
+                vm.id,
                 ( -- Calculate a score for the video based on matches
                     (ym.title LIKE $1) +
                     (ym.description LIKE $1) +
@@ -209,17 +147,22 @@ videos.push(Video::from_hash(
                     (ymt.tag LIKE $1)
             ORDER BY
                 score DESC,
-                ym.upload_date DESC LIMIT $2 OFFSET $3;"#
+                ym.upload_date DESC LIMIT $2 OFFSET $3;"#,
         )
         .bind(query)
         .bind(n)
         .bind(start)
-        .fetch_all(&self.conn).await.unwrap().into_iter().map(|x: (String, i64)| {
-            Video::from_hash(
-                &x.0,
-                &self.conn
-            )
-        }).collect()
+        .fetch_all(&self.conn)
+        .await
+        .unwrap();
+
+        let mut videos = Vec::new();
+
+        for id in ids {
+            videos.push(Video::get(&id.0).await.unwrap());
+        }
+
+        videos
     }
 }
 
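Note: the search now resolves matches in two steps — collect the ranked ids, then load each Video with Video::get. The loop above awaits the lookups one after another; since they are independent, the same resolution could also be issued concurrently. A sketch, assuming the futures crate and the Video::get introduced later in this commit (the function name is illustrative):

    use futures::future::join_all;

    // Hypothetical concurrent variant of the id -> Video resolution loop.
    async fn resolve_videos(ids: Vec<(uuid::Uuid,)>) -> Vec<Video> {
        join_all(ids.iter().map(|(id,)| Video::get(id)))
            .await
            .into_iter()
            .flatten() // drops ids whose row has disappeared in the meantime
            .collect()
    }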
@@ -230,7 +173,7 @@ impl Library {
         let lib = self.get_video_paths(dir).await;
 
         for path in lib {
-            Video::insert_path_to_db(&self.conn, &path).await;
+            Video::insert_path_to_db(&path).await;
         }
 
         log::info!("Finished scanning {dir:?}");
 
@@ -1,165 +1,76 @@
-use std::path::PathBuf;
-
+use crate::{get_pg, pages::ToAPI, yt_meta};
+use serde::{Deserialize, Serialize};
 use serde_json::json;
+use sqlx::prelude::FromRow;
+use std::path::PathBuf;
 
-use crate::library::func::calculate_sha256_hash;
-use crate::yt_meta;
-
-// todo : optimize
-
-#[derive(Debug, Default, Clone)]
-pub struct Video<'a> {
-    directory: Option<String>,
-    path: Option<PathBuf>,
-    title: Option<String>,
-    hash: String,
-    youtube_id: Option<String>,
-    description: Option<String>,
-    uploader_name: Option<String>,
-    uploader_id: Option<String>,
-    duration: Option<i64>,
-    views: Option<i64>,
-    categories: Option<Vec<String>>,
-    tags: Option<Vec<String>>,
-    upload_date: Option<chrono::NaiveDate>,
-    db: Option<&'a sqlx::PgPool>,
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct YouTubeMeta {
+    pub id: String,
+    pub title: String,
+    pub uploader_name: String,
+    pub uploader_id: String,
+    pub duration: i64,
+    pub views: i64,
+    pub upload_date: chrono::NaiveDate,
 }
 
-// Video properties
-impl<'a> Video<'a> {
-    async fn get_video_info(&mut self) {
-        log::info!("Fetching Video Metadata for {}", self.hash);
-
-        let res: Vec<(String, String)> = sqlx::query_as(
-            "SELECT title, path FROM videos WHERE sha256 = $1")
-        .bind(&self.hash)
-        .fetch_all(&*self.db.unwrap()).await.unwrap();
-
-        let res = res.first().unwrap();
-
-        self.title = Some(res.0.clone());
-        self.path = Some(std::path::Path::new(&res.1).to_path_buf());
+impl YouTubeMeta {
+    pub async fn get(id: &str) -> Option<Self> {
+        sqlx::query_as("SELECT * FROM youtube_meta WHERE id = $1")
+            .bind(id)
+            .fetch_optional(get_pg!())
+            .await
+            .unwrap()
     }
 
-    async fn get_youtube_meta_info(&mut self) {
-        log::info!("Fetching YouTube Metadata for {}", self.hash);
-
-        let res: Vec<(String, String, String, String, chrono::NaiveDate, i64)> = sqlx::query_as(
-            "SELECT id, description, uploader_name, uploader_id, upload_date, views FROM youtube_meta WHERE id = (SELECT youtube_id FROM videos WHERE sha256 = $1 LIMIT 1)")
-        .bind(&self.hash)
-        .fetch_all(&**self.db.as_ref().unwrap()).await.unwrap();
-
-        if let Some(res) = res.first() {
-            self.youtube_id = Some(res.0.clone());
-            self.description = Some(res.1.clone());
-            self.uploader_name = Some(res.2.clone());
-            self.uploader_id = Some(res.3.clone());
-            self.upload_date = Some(res.4.clone());
-            self.views = Some(res.5);
-
-            let res: Vec<(String,)> = sqlx::query_as(
-                "SELECT category FROM youtube_meta_categories WHERE youtube_id = $1")
-            .bind(self.youtube_id.as_ref().unwrap())
-            .fetch_all(&**self.db.as_ref().unwrap()).await.unwrap();
-
-            self.categories = Some(res.into_iter().map(|x| x.0).collect());
-
-            let res: Vec<(String,)> = sqlx::query_as(
-                "SELECT tag FROM youtube_meta_tags WHERE youtube_id = $1")
-            .bind(self.youtube_id.as_ref().unwrap())
-            .fetch_all(&**self.db.as_ref().unwrap()).await.unwrap();
-
-            self.tags = Some(res.into_iter().map(|x| x.0).collect());
-        }
+    pub async fn tags(&self) -> Vec<String> {
+        let res: Vec<(String,)> =
+            sqlx::query_as("SELECT tag FROM youtube_meta_tags WHERE youtube_id = $1")
+                .bind(&self.id)
+                .fetch_all(get_pg!())
+                .await
+                .unwrap();
+
+        res.into_iter().map(|x| x.0).collect()
     }
 
-    pub async fn title(&mut self) -> Option<&str> {
-        if self.title.is_none() {
-            self.get_video_info().await;
-        }
-        self.title.as_deref()
-    }
-
-    pub async fn path(&mut self) -> Option<PathBuf> {
-        if self.path.is_none() {
-            self.get_video_info().await;
-        }
-        self.path.as_ref().map(std::clone::Clone::clone)
-    }
-
-    pub async fn description(&mut self) -> Option<&str> {
-        if self.description.is_none() {
-            self.get_youtube_meta_info().await;
-        }
-        self.description.as_deref()
-    }
-
-    pub async fn views(&mut self) -> Option<i64> {
-        if self.views.is_none() {
-            self.get_youtube_meta_info().await;
-        }
-        self.views
-    }
-
-    pub async fn uploader_name(&mut self) -> Option<&str> {
-        if self.uploader_name.is_none() {
-            self.get_youtube_meta_info().await;
-        }
-        self.uploader_name.as_deref()
-    }
-
-    pub async fn uploader_id(&mut self) -> Option<&str> {
-        if self.uploader_id.is_none() {
-            self.get_youtube_meta_info().await;
-        }
-        self.uploader_id.as_deref()
-    }
-
-    pub async fn upload_date(&mut self) -> Option<chrono::NaiveDate> {
-        if self.upload_date.is_none() {
-            self.get_youtube_meta_info().await;
-        }
-        self.upload_date
-    }
-
-    pub async fn categories(&mut self) -> Option<&Vec<String>> {
-        if self.categories.is_none() {
-            self.get_youtube_meta_info().await;
-        }
-        self.categories.as_ref()
-    }
-
-    pub async fn tags(&mut self) -> Option<&Vec<String>> {
-        if self.tags.is_none() {
-            self.get_youtube_meta_info().await;
-        }
-        self.tags.as_ref()
-    }
-
-    pub async fn youtube_id(&mut self) -> Option<&str> {
-        if self.youtube_id.is_none() {
-            self.get_youtube_meta_info().await;
-        }
-        self.youtube_id.as_deref()
-    }
-
-    pub fn hash(&self) -> &str {
-        &self.hash
+    pub async fn categories(&self) -> Vec<String> {
+        let res: Vec<(String,)> =
+            sqlx::query_as("SELECT category FROM youtube_meta_categories WHERE youtube_id = $1")
+                .bind(&self.id)
+                .fetch_all(get_pg!())
+                .await
+                .unwrap();
+
+        res.into_iter().map(|x| x.0).collect()
     }
 }
 
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct Video {
+    pub id: uuid::Uuid,
+    pub directory: String,
+    pub path: String,
+    pub duration: i64,
+    pub title: String,
+    youtube_id: Option<String>,
+}
+
 // Video Init
-impl<'a> Video<'a> {
-    pub fn from_hash(hash: &str, db: &'a sqlx::PgPool) -> Self {
-        Self {
-            hash: hash.to_owned(),
-            db: Some(db),
-            ..Default::default()
-        }
+impl Video {
+    pub async fn get(id: &uuid::Uuid) -> Option<Self> {
+        sqlx::query_as("SELECT * FROM videos WHERE id = $1")
+            .bind(id)
+            .fetch_optional(get_pg!())
+            .await
+            .unwrap()
     }
 
-    pub async fn insert_path_to_db(db: &'a sqlx::PgPool, v: &PathBuf) -> Option<Self> {
+    pub async fn insert_path_to_db(v: &PathBuf) -> Option<Self> {
+        let db = get_pg!();
 
         log::info!("Add {v:?} to library");
-        let id = calculate_sha256_hash(v.to_str().unwrap()).unwrap();
         let file_name = v.file_stem().unwrap().to_str().unwrap().to_owned();
         let dir = v
             .parent()
@@ -184,80 +95,70 @@ impl<'a> Video<'a> {
             .bind(&meta.upload_date())
             .execute(&mut *tx).await.unwrap();
 
-            sqlx::query("INSERT INTO videos (sha256, directory, path, title, youtube_id) VALUES ($1, $2, $3, $4, $5)")
-                .bind(&id)
+            let vid = sqlx::query_as("INSERT INTO videos (directory, path, title, youtube_id) VALUES ($1, $2, $3, $4, $5)")
                 .bind(&dir)
                 .bind(v.to_str().unwrap())
                 .bind(meta.title())
                 .bind(meta.youtube_id().unwrap())
-                .execute(&mut *tx).await.unwrap();
+                .fetch_one(&mut *tx).await.unwrap();
 
             for cat in meta.categories().unwrap() {
-                sqlx::query("INSERT INTO youtube_meta_categories (youtube_id, category) VALUES ($1, $2)")
-                    .bind(meta.youtube_id().unwrap()).bind(cat).execute(&mut *tx).await.unwrap();
+                sqlx::query(
+                    "INSERT INTO youtube_meta_categories (youtube_id, category) VALUES ($1, $2)",
+                )
+                .bind(meta.youtube_id().unwrap())
+                .bind(cat)
+                .execute(&mut *tx)
+                .await
+                .unwrap();
             }
 
             if let Some(tags) = meta.tags() {
                 for tag in tags {
-                    sqlx::query(
-                        "INSERT INTO youtube_meta_tags (youtube_id, tag) VALUES ($1, $2)")
-                    .bind(&meta.youtube_id().unwrap()).bind(&tag).execute(&mut *tx).await.unwrap();
+                    sqlx::query("INSERT INTO youtube_meta_tags (youtube_id, tag) VALUES ($1, $2)")
+                        .bind(&meta.youtube_id().unwrap())
+                        .bind(&tag)
+                        .execute(&mut *tx)
+                        .await
+                        .unwrap();
                 }
             }
 
             tx.commit().await.unwrap();
 
-            return Some(Self {
-                directory: Some(dir),
-                path: Some(v.to_owned()),
-                title: Some(meta.title()),
-                hash: id,
-                youtube_id: meta.youtube_id(),
-                description: meta.description(),
-                uploader_name: meta.uploader_name(),
-                uploader_id: meta.uploader_id(),
-                duration: meta.duration(),
-                views: meta.views(),
-                categories: meta.categories(),
-                tags: meta.tags(),
-                upload_date: meta.upload_date(),
-                db: Some(db),
-            });
+            return Some(vid);
         }
 
-        sqlx::query(
-            "INSERT INTO videos (sha256, directory, path, title) VALUES ($1, $2, $3, $4)")
-            .bind(&id).bind(dir).bind(v.to_str().unwrap()).bind(file_name).execute(&mut *tx).await.unwrap();
+        let vid = sqlx::query_as(
+            "INSERT INTO videos (directory, path, title) VALUES ($1, $2, $3, $4) RETURNING *",
+        )
+        .bind(dir)
+        .bind(v.to_str().unwrap())
+        .bind(file_name)
+        .fetch_one(&mut *tx)
+        .await
+        .unwrap();
 
         tx.commit().await.unwrap();
 
-        Some(Self {
-            path: Some(v.to_owned()),
-            title: Some(v.file_stem().unwrap().to_str().unwrap().to_owned()),
-            hash: id,
-            db: Some(db),
-            ..Self::default()
-        })
+        Some(vid)
     }
 }
 
-impl Video<'_> {
-    pub async fn api(&mut self) -> serde_json::Value {
-        self.get_video_info().await;
-        self.get_youtube_meta_info().await;
+impl ToAPI for Video {
+    async fn api(&self) -> serde_json::Value {
+        let yt_meta = if let Some(yt_meta) = &self.youtube_id {
+            YouTubeMeta::get(yt_meta).await
+        } else {
+            None
+        };
 
         json!({
-            "directory": self.directory.as_ref().unwrap(),
-            "title": self.title.as_ref().unwrap(),
-            "hash": self.hash,
-            "youtube_id": self.youtube_id,
-            "uploader_name": self.uploader_name,
-            "uploader_id": self.uploader_id,
+            "id": &self.id,
+            "directory": self.directory,
             "duration": self.duration,
-            "views": self.views,
-            "categories": self.categories,
-            "tags": self.tags,
-            "upload_date": self.upload_date,
+            "title": self.title,
+            "yt": yt_meta
         })
     }
 }
 
src/main.rs (50 changed lines)
@@ -1,15 +1,30 @@
 use std::path::Path;
 
 use rocket::{http::Method, routes};
+use tokio::sync::OnceCell;
 
 mod library;
 mod pages;
 mod yt_meta;
 
-// TODO : Add User Auth DB
+pub static PG: OnceCell<sqlx::PgPool> = OnceCell::const_new();
 
-// TODO : Rework into Video Server Backend
-// -> API
+#[macro_export]
+macro_rules! get_pg {
+    () => {
+        if let Some(client) = $crate::PG.get() {
+            client
+        } else {
+            let client = sqlx::postgres::PgPoolOptions::new()
+                .max_connections(5)
+                .connect(&std::env::var("DATABASE_URL").unwrap())
+                .await
+                .unwrap();
+            $crate::PG.set(client).unwrap();
+            $crate::PG.get().unwrap()
+        }
+    };
+}
 
 #[rocket::launch]
 async fn launch() -> _ {
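Note: because the macro expands to an expression containing .await, get_pg!() can only be used inside async code, and if two tasks race through the cold path the second PG.set(...) returns an error and panics on unwrap. Tokio's OnceCell also offers get_or_init, which runs the initialiser at most once even under concurrent first calls; a sketch of an equivalent helper built on it (the function name is illustrative, not part of this commit):

    use tokio::sync::OnceCell;

    static PG: OnceCell<sqlx::PgPool> = OnceCell::const_new();

    // Hypothetical alternative to the get_pg! macro: get_or_init serialises
    // the one-time pool construction instead of panicking on a lost race.
    async fn pg() -> &'static sqlx::PgPool {
        PG.get_or_init(|| async {
            sqlx::postgres::PgPoolOptions::new()
                .max_connections(5)
                .connect(&std::env::var("DATABASE_URL").unwrap())
                .await
                .unwrap()
        })
        .await
    }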
@@ -26,11 +41,17 @@ async fn launch() -> _ {
     let dir_path = args[1].clone();
 
+    let pg = get_pg!();
+
+    sqlx::migrate!("./migrations").run(pg).await.unwrap();
+
     let lib = library::Library::new().await;
 
     let library = lib.clone();
 
-    library.scan_dir(&Path::new(&dir_path.clone()).to_path_buf()).await;
+    library
+        .scan_dir(&Path::new(&dir_path.clone()).to_path_buf())
+        .await;
 
     let cors = rocket_cors::CorsOptions {
         allowed_origins: rocket_cors::AllowedOrigins::all(),
@@ -46,15 +67,18 @@ async fn launch() -> _ {
         .expect("error creating CORS options");
 
     rocket::build()
-        .mount("/", routes![
-            pages::assets::video_file,
-            pages::assets::video_thumbnail,
-            pages::index::search,
-            pages::index::channel_page,
-            pages::yt::yt_tags,
-            pages::yt::yt_tag_page,
-            pages::yt::yt_channel_page
-        ])
+        .mount(
+            "/",
+            routes![
+                pages::assets::video_file,
+                pages::assets::video_thumbnail,
+                pages::index::search,
+                pages::index::channel_page,
+                pages::yt::yt_tags,
+                pages::yt::yt_tag_page,
+                pages::yt::yt_channel_page
+            ],
+        )
         .attach(cors)
         .manage(lib)
 }
 
@@ -4,28 +4,28 @@ use crate::library::Library;
 
 #[get("/video/raw?<v>")]
 pub async fn video_file(v: &str, library: &State<Library>) -> Option<NamedFile> {
-    let (_, mut video) = if let Some((channel, video)) = library.get_video_by_hash(v).await {
-        (channel, video)
+    let video = if let Some(video) = library.get_video_by_id(v).await {
+        video
     } else {
         library.get_video_by_youtube_id(v).await.unwrap()
     };
 
-    NamedFile::open(video.path().await?).await.ok()
+    NamedFile::open(video.path).await.ok()
 }
 
 #[get("/video/thumbnail?<v>")]
 pub async fn video_thumbnail(v: &str, library: &State<Library>) -> Option<NamedFile> {
-    let (_, mut video) = if let Some((channel, video)) = library.get_video_by_hash(v).await {
-        (channel, video)
+    let video = if let Some(video) = library.get_video_by_id(v).await {
+        video
     } else {
         library.get_video_by_youtube_id(v).await.unwrap()
     };
 
-    let path = video.path().await.unwrap();
+    let path = std::path::Path::new(&video.path);
     let parent = path.parent().unwrap();
     let thumbnail_path = path.file_stem().unwrap().to_str().unwrap();
     let thumbnail_path = parent.join(thumbnail_path);
     let thumbnail_path = thumbnail_path.to_str().unwrap();
 
     NamedFile::open(format!("{thumbnail_path}.jpg")).await.ok()
 }
 
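Note: the thumbnail is located by swapping the video file's extension for ".jpg" in the same directory. For reference, Path::with_extension expresses the same stem/parent/join sequence in one call; a small standalone sketch, independent of the handler above:

    use std::path::{Path, PathBuf};

    // "/library/channel/video.mkv" -> "/library/channel/video.jpg"
    fn thumbnail_for(video_path: &str) -> PathBuf {
        Path::new(video_path).with_extension("jpg")
    }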
@@ -3,23 +3,27 @@ use serde_json::json;
 
 use crate::library::Library;
 
-use super::vec_to_api_video_mut;
+use super::vec_to_api;
 
 #[get("/search?<query>&<offset>")]
-pub async fn search(query: &str, offset: Option<i64>, library: &State<Library>) -> Option<serde_json::Value> {
+pub async fn search(
+    query: &str,
+    offset: Option<i64>,
+    library: &State<Library>,
+) -> Option<serde_json::Value> {
     const NUM_OF_RESULTS: i64 = 20;
 
     // get start parameter for search result chunks
     let start = offset.unwrap_or(0);
 
     let mut video_matches = library.search_video(query, start, NUM_OF_RESULTS).await;
 
-    Some(json!(vec_to_api_video_mut(&mut video_matches).await))
+    Some(json!(vec_to_api(&mut video_matches).await))
 }
 
 #[get("/d/<dir>")]
 pub async fn channel_page(dir: &str, library: &State<Library>) -> Option<serde_json::Value> {
     let mut dir_videos = library.get_directory_videos(dir).await;
 
-    Some(json!(vec_to_api_video_mut(&mut dir_videos).await))
+    Some(json!(vec_to_api(&mut dir_videos).await))
 }
 
@@ -18,13 +18,3 @@ pub async fn vec_to_api(items: &[impl ToAPI]) -> Vec<serde_json::Value> {
 
     ret
 }
-
-pub async fn vec_to_api_video_mut(items: &mut [crate::library::Video<'_>]) -> Vec<serde_json::Value> {
-    let mut ret = Vec::with_capacity(items.len());
-
-    for e in items {
-        ret.push(e.api().await);
-    }
-
-    ret
-}
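Note: with Video now implementing ToAPI (see the impl ToAPI for Video block earlier in this commit), the generic vec_to_api covers what the removed vec_to_api_video_mut did. The trait definition itself is not part of this diff; a plausible shape consistent with vec_to_api(items: &[impl ToAPI]) and async fn api(&self), assuming native async-fn-in-trait (Rust 1.75+), would be:

    // Sketch only: the actual definition lives elsewhere in the pages module.
    pub trait ToAPI {
        // Serialise one item into the JSON value returned by the API handlers.
        async fn api(&self) -> serde_json::Value;
    }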
@@ -1,7 +1,7 @@
 use rocket::{get, State};
 use serde_json::json;
 
-use crate::{library::Library, pages::vec_to_api_video_mut};
+use crate::{library::Library, pages::vec_to_api};
 
 #[get("/yt/tags")]
 pub async fn yt_tags(library: &State<Library>) -> serde_json::Value {
@@ -13,11 +13,11 @@ pub async fn yt_tags(library: &State<Library>) -> serde_json::Value {
 pub async fn yt_tag_page(tag: &str, library: &State<Library>) -> serde_json::Value {
     let mut channel_videos = library.get_videos_by_tag_yt(&tag).await;
 
-    json!(vec_to_api_video_mut(&mut channel_videos).await)
+    json!(vec_to_api(&mut channel_videos).await)
 }
 
 #[get("/yt/c/<channel>")]
 pub async fn yt_channel_page(channel: &str, library: &State<Library>) -> serde_json::Value {
     let mut channel_videos = library.get_channel_videos_yt(&channel).await;
-    json!(vec_to_api_video_mut(&mut channel_videos).await)
+    json!(vec_to_api(&mut channel_videos).await)
 }