This commit is contained in:
JMARyA 2024-10-07 09:24:54 +02:00
parent 1979fc246e
commit 224bdf77ab
Signed by: jmarya
GPG key ID: 901B2ADDF27C2263
9 changed files with 207 additions and 367 deletions

View file

@@ -25,12 +25,11 @@ CREATE TABLE IF NOT EXISTS "youtube_meta_categories" (
);
CREATE TABLE IF NOT EXISTS "videos" (
"sha256" TEXT NOT NULL,
"id" UUID NOT NULL PRIMARY KEY DEFAULT gen_random_uuid(),
"directory" TEXT NOT NULL,
"path" TEXT NOT NULL,
"duration" INTEGER,
"title" TEXT,
"youtube_id" TEXT,
PRIMARY KEY("sha256"),
FOREIGN KEY("youtube_id") REFERENCES "youtube_meta"("id")
);

View file

@@ -1,6 +1,3 @@
use ring::digest::{Context, SHA256};
use std::io::Read;
pub fn is_video_file(filename: &str) -> bool {
let video_extensions = vec!["mp4", "mkv", "webm"];
@@ -11,21 +8,3 @@ pub fn is_video_file(filename: &str) -> bool {
}
false
}
pub fn calculate_sha256_hash(file_path: &str) -> std::io::Result<String> {
log::info!("Hashing {file_path}");
let mut context = Context::new(&SHA256);
let mut file = std::fs::File::open(file_path)?;
let mut buffer = [0u8; 4096];
loop {
let n = file.read(&mut buffer)?;
if n == 0 {
break;
}
context.update(&buffer[..n]);
}
let digest = context.finish();
Ok(hex::encode(digest.as_ref()))
}

View file

@@ -42,21 +42,11 @@ impl Library {
}
pub async fn get_directory_videos(&self, dir: &str) -> Vec<Video> {
let videos_ids: Vec<(String, )> = sqlx::query_as(
"SELECT sha256 FROM videos INNER JOIN youtube_meta ON youtube_meta.id = videos.youtube_id WHERE directory = ?1 ORDER BY youtube_meta.upload_date DESC;")
sqlx::query_as("SELECT * FROM videos WHERE directory = ?1")
.bind(dir)
.fetch_all(&self.conn).await.unwrap();
let mut videos: Vec<Video<'_>> = Vec::new();
for video in videos_ids {
videos.push(Video::from_hash(
&video.0,
&self.conn
));
}
videos
.fetch_all(&self.conn)
.await
.unwrap()
}
// YT
@@ -81,95 +71,43 @@ videos.push(Video::from_hash(
}
pub async fn get_videos_by_tag_yt(&self, tag: &str) -> Vec<Video> {
let videos_ids: Vec<(String, )> = sqlx::query_as(
"SELECT sha256 FROM youtube_meta_tags INNER JOIN youtube_meta ON youtube_meta_tags.youtube_id = youtube_meta.id INNER JOIN videos ON videos.youtube_id = youtube_meta.id WHERE tag = $1;")
sqlx::query_as(
"SELECT * FROM youtube_meta_tags INNER JOIN youtube_meta ON youtube_meta_tags.youtube_id = youtube_meta.id INNER JOIN videos ON videos.youtube_id = youtube_meta.id WHERE tag = $1")
.bind(tag)
.fetch_all(&self.conn).await.unwrap();
let mut videos: Vec<Video<'_>> = Vec::new();
for video in videos_ids {
videos.push(Video::from_hash(
&video.0,
&self.conn
));
}
videos
.fetch_all(&self.conn).await.unwrap()
}
pub async fn get_channel_videos_yt(&self, id: &str) -> Vec<Video> {
let videos_ids: Vec<(String, )> = sqlx::query_as(
"SELECT sha256 FROM youtube_meta INNER JOIN videos ON youtube_meta.id = videos.youtube_id WHERE uploader_id = $1 ORDER BY youtube_meta.upload_date DESC;")
sqlx::query_as(
"SELECT * FROM youtube_meta INNER JOIN videos ON youtube_meta.id = videos.youtube_id WHERE uploader_id = $1 ORDER BY youtube_meta.upload_date DESC;")
.bind(id)
.fetch_all(&self.conn).await.unwrap();
let mut videos: Vec<Video<'_>> = Vec::new();
for video in videos_ids {
videos.push(Video::from_hash(
&video.0,
&self.conn
));
}
videos
.fetch_all(&self.conn).await.unwrap()
}
// videos
pub async fn get_random_videos(&self, n: i64) -> Vec<Video> {
let videos_ids: Vec<(String, )> = sqlx::query_as(
"SELECT sha256 FROM videos ORDER BY RANDOM() LIMIT $1;")
sqlx::query_as("SELECT * FROM videos ORDER BY RANDOM() LIMIT $1;")
.bind(n)
.fetch_all(&self.conn).await.unwrap();
let mut videos: Vec<Video<'_>> = Vec::new();
for video in videos_ids {
videos.push(Video::from_hash(
&video.0,
&self.conn
));
}
videos
.fetch_all(&self.conn)
.await
.unwrap()
}
pub async fn get_video_by_hash(&self, hash: &str) -> Option<(String, Video)> {
let res: Vec<(String, Video)> = sqlx::query_as::<sqlx::Postgres, (String, String)>(
"SELECT sha256, directory FROM videos WHERE sha256 = $1"
)
.bind(hash)
.fetch_all(&self.conn).await.unwrap().into_iter()
.map(|x| {
(x.1, Video::from_hash(&x.0, &self.conn))
}).collect();
if !res.is_empty() {
return res.first().map(std::borrow::ToOwned::to_owned);
}
None
}
pub async fn get_video_by_youtube_id(&self, id: &str) -> Option<(String, Video)> {
let res: Vec<(String, Video<'_>)> = sqlx::query_as(
"SELECT sha256, directory FROM videos WHERE youtube_id = $1")
pub async fn get_video_by_id(&self, id: &str) -> Option<Video> {
sqlx::query_as("SELECT * FROM videos WHERE id = $1")
.bind(id)
.fetch_all(&self.conn).await.unwrap().into_iter().map(|x: (String, String)| {
(
x.1,
Video::from_hash(&x.0, &self.conn),
)
}).collect();
.fetch_optional(&self.conn)
.await
.unwrap()
}
if !res.is_empty() {
return res.first().map(std::borrow::ToOwned::to_owned);
}
None
pub async fn get_video_by_youtube_id(&self, id: &str) -> Option<Video> {
sqlx::query_as("SELECT * FROM videos WHERE youtube_id = $1")
.bind(id)
.fetch_optional(&self.conn)
.await
.unwrap()
}
pub async fn has_path(&self, path: &Path) -> bool {
@@ -186,9 +124,9 @@ videos.push(Video::from_hash(
pub async fn search_video(&self, query: &str, start: i64, n: i64) -> Vec<Video> {
let query = format!("%{query}%");
sqlx::query_as(
let ids: Vec<(uuid::Uuid,)> = sqlx::query_as(
r#"SELECT DISTINCT
vm.sha256,
vm.id,
( -- Calculate a score for the video based on matches
(ym.title LIKE $1) +
(ym.description LIKE $1) +
@@ -209,17 +147,22 @@ videos.push(Video::from_hash(
(ymt.tag LIKE $1)
ORDER BY
score DESC,
ym.upload_date DESC LIMIT $2 OFFSET $3;"#
ym.upload_date DESC LIMIT $2 OFFSET $3;"#,
)
.bind(query)
.bind(n)
.bind(start)
.fetch_all(&self.conn).await.unwrap().into_iter().map(|x: (String, i64)| {
Video::from_hash(
&x.0,
&self.conn
)
}).collect()
.fetch_all(&self.conn)
.await
.unwrap();
let mut videos = Vec::new();
for id in ids {
videos.push(Video::get(&id.0).await.unwrap());
}
videos
}
}
@@ -230,7 +173,7 @@ impl Library {
let lib = self.get_video_paths(dir).await;
for path in lib {
Video::insert_path_to_db(&self.conn, &path).await;
Video::insert_path_to_db(&path).await;
}
log::info!("Finished scanning {dir:?}");

View file

@@ -1,165 +1,76 @@
use std::path::PathBuf;
use crate::{get_pg, pages::ToAPI, yt_meta};
use serde::{Deserialize, Serialize};
use serde_json::json;
use sqlx::prelude::FromRow;
use std::path::PathBuf;
use crate::library::func::calculate_sha256_hash;
use crate::yt_meta;
// todo : optimize
#[derive(Debug, Default, Clone)]
pub struct Video<'a> {
directory: Option<String>,
path: Option<PathBuf>,
title: Option<String>,
hash: String,
youtube_id: Option<String>,
description: Option<String>,
uploader_name: Option<String>,
uploader_id: Option<String>,
duration: Option<i64>,
views: Option<i64>,
categories: Option<Vec<String>>,
tags: Option<Vec<String>>,
upload_date: Option<chrono::NaiveDate>,
db: Option<&'a sqlx::PgPool>,
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct YouTubeMeta {
pub id: String,
pub title: String,
pub uploader_name: String,
pub uploader_id: String,
pub duration: i64,
pub views: i64,
pub upload_date: chrono::NaiveDate,
}
// Video properties
impl<'a> Video<'a> {
async fn get_video_info(&mut self) {
log::info!("Fetching Video Metadata for {}", self.hash);
let res: Vec<(String, String)> = sqlx::query_as(
"SELECT title, path FROM videos WHERE sha256 = $1")
.bind(&self.hash)
.fetch_all(&*self.db.unwrap()).await.unwrap();
let res = res.first().unwrap();
self.title = Some(res.0.clone());
self.path = Some(std::path::Path::new(&res.1).to_path_buf());
impl YouTubeMeta {
pub async fn get(id: &str) -> Option<Self> {
sqlx::query_as("SELECT * FROM youtube_meta WHERE id = $1")
.bind(id)
.fetch_optional(get_pg!())
.await
.unwrap()
}
async fn get_youtube_meta_info(&mut self) {
log::info!("Fetching YouTube Metadata for {}", self.hash);
pub async fn tags(&self) -> Vec<String> {
let res: Vec<(String,)> =
sqlx::query_as("SELECT tag FROM youtube_meta_tags WHERE youtube_id = $1")
.bind(&self.id)
.fetch_all(get_pg!())
.await
.unwrap();
let res: Vec<(String, String, String, String, chrono::NaiveDate, i64)> = sqlx::query_as(
"SELECT id, description, uploader_name, uploader_id, upload_date, views FROM youtube_meta WHERE id = (SELECT youtube_id FROM videos WHERE sha256 = $1 LIMIT 1)")
.bind(&self.hash)
.fetch_all(&**self.db.as_ref().unwrap()).await.unwrap();
if let Some(res) = res.first() {
self.youtube_id = Some(res.0.clone());
self.description = Some(res.1.clone());
self.uploader_name = Some(res.2.clone());
self.uploader_id = Some(res.3.clone());
self.upload_date = Some(res.4.clone());
self.views = Some(res.5);
let res: Vec<(String,)> = sqlx::query_as(
"SELECT category FROM youtube_meta_categories WHERE youtube_id = $1")
.bind(self.youtube_id.as_ref().unwrap())
.fetch_all(&**self.db.as_ref().unwrap()).await.unwrap();
self.categories = Some(res.into_iter().map(|x| x.0).collect());
let res: Vec<(String,)> = sqlx::query_as(
"SELECT tag FROM youtube_meta_tags WHERE youtube_id = $1")
.bind(self.youtube_id.as_ref().unwrap())
.fetch_all(&**self.db.as_ref().unwrap()).await.unwrap();
self.tags = Some(res.into_iter().map(|x| x.0).collect());
}
res.into_iter().map(|x| x.0).collect()
}
pub async fn title(&mut self) -> Option<&str> {
if self.title.is_none() {
self.get_video_info().await;
}
self.title.as_deref()
}
pub async fn categories(&self) -> Vec<String> {
let res: Vec<(String,)> =
sqlx::query_as("SELECT category FROM youtube_meta_categories WHERE youtube_id = $1")
.bind(&self.id)
.fetch_all(get_pg!())
.await
.unwrap();
pub async fn path(&mut self) -> Option<PathBuf> {
if self.path.is_none() {
self.get_video_info().await;
}
self.path.as_ref().map(std::clone::Clone::clone)
res.into_iter().map(|x| x.0).collect()
}
}
pub async fn description(&mut self) -> Option<&str> {
if self.description.is_none() {
self.get_youtube_meta_info().await;
}
self.description.as_deref()
}
pub async fn views(&mut self) -> Option<i64> {
if self.views.is_none() {
self.get_youtube_meta_info().await;
}
self.views
}
pub async fn uploader_name(&mut self) -> Option<&str> {
if self.uploader_name.is_none() {
self.get_youtube_meta_info().await;
}
self.uploader_name.as_deref()
}
pub async fn uploader_id(&mut self) -> Option<&str> {
if self.uploader_id.is_none() {
self.get_youtube_meta_info().await;
}
self.uploader_id.as_deref()
}
pub async fn upload_date(&mut self) -> Option<chrono::NaiveDate> {
if self.upload_date.is_none() {
self.get_youtube_meta_info().await;
}
self.upload_date
}
pub async fn categories(&mut self) -> Option<&Vec<String>> {
if self.categories.is_none() {
self.get_youtube_meta_info().await;
}
self.categories.as_ref()
}
pub async fn tags(&mut self) -> Option<&Vec<String>> {
if self.tags.is_none() {
self.get_youtube_meta_info().await;
}
self.tags.as_ref()
}
pub async fn youtube_id(&mut self) -> Option<&str> {
if self.youtube_id.is_none() {
self.get_youtube_meta_info().await;
}
self.youtube_id.as_deref()
}
pub fn hash(&self) -> &str {
&self.hash
}
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct Video {
pub id: uuid::Uuid,
pub directory: String,
pub path: String,
pub duration: i64,
pub title: String,
youtube_id: Option<String>,
}
// Video Init
impl<'a> Video<'a> {
pub fn from_hash(hash: &str, db: &'a sqlx::PgPool) -> Self {
Self {
hash: hash.to_owned(),
db: Some(db),
..Default::default()
}
impl Video {
pub async fn get(id: &uuid::Uuid) -> Option<Self> {
sqlx::query_as("SELECT * FROM videos WHERE id = $1")
.bind(id)
.fetch_optional(get_pg!())
.await
.unwrap()
}
pub async fn insert_path_to_db(db: &'a sqlx::PgPool, v: &PathBuf) -> Option<Self> {
pub async fn insert_path_to_db(v: &PathBuf) -> Option<Self> {
let db = get_pg!();
log::info!("Add {v:?} to library");
let id = calculate_sha256_hash(v.to_str().unwrap()).unwrap();
let file_name = v.file_stem().unwrap().to_str().unwrap().to_owned();
let dir = v
.parent()
@@ -184,80 +95,70 @@ impl<'a> Video<'a> {
.bind(&meta.upload_date())
.execute(&mut *tx).await.unwrap();
sqlx::query("INSERT INTO videos (sha256, directory, path, title, youtube_id) VALUES ($1, $2, $3, $4, $5)")
.bind(&id)
let vid = sqlx::query_as("INSERT INTO videos (directory, path, title, youtube_id) VALUES ($1, $2, $3, $4, $5)")
.bind(&dir)
.bind(v.to_str().unwrap())
.bind(meta.title())
.bind(meta.youtube_id().unwrap())
.execute(&mut *tx).await.unwrap();
.fetch_one(&mut *tx).await.unwrap();
for cat in meta.categories().unwrap() {
sqlx::query("INSERT INTO youtube_meta_categories (youtube_id, category) VALUES ($1, $2)")
.bind(meta.youtube_id().unwrap()).bind(cat).execute(&mut *tx).await.unwrap();
sqlx::query(
"INSERT INTO youtube_meta_categories (youtube_id, category) VALUES ($1, $2)",
)
.bind(meta.youtube_id().unwrap())
.bind(cat)
.execute(&mut *tx)
.await
.unwrap();
}
if let Some(tags) = meta.tags() {
for tag in tags {
sqlx::query(
"INSERT INTO youtube_meta_tags (youtube_id, tag) VALUES ($1, $2)")
.bind(&meta.youtube_id().unwrap()).bind(&tag).execute(&mut *tx).await.unwrap();
sqlx::query("INSERT INTO youtube_meta_tags (youtube_id, tag) VALUES ($1, $2)")
.bind(&meta.youtube_id().unwrap())
.bind(&tag)
.execute(&mut *tx)
.await
.unwrap();
}
}
tx.commit().await.unwrap();
return Some(Self {
directory: Some(dir),
path: Some(v.to_owned()),
title: Some(meta.title()),
hash: id,
youtube_id: meta.youtube_id(),
description: meta.description(),
uploader_name: meta.uploader_name(),
uploader_id: meta.uploader_id(),
duration: meta.duration(),
views: meta.views(),
categories: meta.categories(),
tags: meta.tags(),
upload_date: meta.upload_date(),
db: Some(db),
});
return Some(vid);
}
sqlx::query(
"INSERT INTO videos (sha256, directory, path, title) VALUES ($1, $2, $3, $4)")
.bind(&id).bind(dir).bind(v.to_str().unwrap()).bind(file_name).execute(&mut *tx).await.unwrap();
let vid = sqlx::query_as(
"INSERT INTO videos (directory, path, title) VALUES ($1, $2, $3, $4) RETURNING *",
)
.bind(dir)
.bind(v.to_str().unwrap())
.bind(file_name)
.fetch_one(&mut *tx)
.await
.unwrap();
tx.commit().await.unwrap();
Some(Self {
path: Some(v.to_owned()),
title: Some(v.file_stem().unwrap().to_str().unwrap().to_owned()),
hash: id,
db: Some(db),
..Self::default()
})
Some(vid)
}
}
impl Video<'_> {
pub async fn api(&mut self) -> serde_json::Value {
self.get_video_info().await;
self.get_youtube_meta_info().await;
impl ToAPI for Video {
async fn api(&self) -> serde_json::Value {
let yt_meta = if let Some(yt_meta) = &self.youtube_id {
YouTubeMeta::get(yt_meta).await
} else {
None
};
json!({
"directory": self.directory.as_ref().unwrap(),
"title": self.title.as_ref().unwrap(),
"hash": self.hash,
"youtube_id": self.youtube_id,
"uploader_name": self.uploader_name,
"uploader_id": self.uploader_id,
"id": &self.id,
"directory": self.directory,
"duration": self.duration,
"views": self.views,
"categories": self.categories,
"tags": self.tags,
"upload_date": self.upload_date,
"title": self.title,
"yt": yt_meta
})
}
}
}

View file

@@ -1,15 +1,30 @@
use std::path::Path;
use rocket::{http::Method, routes};
use tokio::sync::OnceCell;
mod library;
mod pages;
mod yt_meta;
// TODO : Add User Auth DB
pub static PG: OnceCell<sqlx::PgPool> = OnceCell::const_new();
// TODO : Rework into Video Server Backend
// -> API
#[macro_export]
macro_rules! get_pg {
() => {
if let Some(client) = $crate::PG.get() {
client
} else {
let client = sqlx::postgres::PgPoolOptions::new()
.max_connections(5)
.connect(&std::env::var("DATABASE_URL").unwrap())
.await
.unwrap();
$crate::PG.set(client).unwrap();
$crate::PG.get().unwrap()
}
};
}
#[rocket::launch]
async fn launch() -> _ {
@@ -26,11 +41,17 @@ async fn launch() -> _ {
let dir_path = args[1].clone();
let pg = get_pg!();
sqlx::migrate!("./migrations").run(pg).await.unwrap();
let lib = library::Library::new().await;
let library = lib.clone();
library.scan_dir(&Path::new(&dir_path.clone()).to_path_buf()).await;
library
.scan_dir(&Path::new(&dir_path.clone()).to_path_buf())
.await;
let cors = rocket_cors::CorsOptions {
allowed_origins: rocket_cors::AllowedOrigins::all(),
@@ -46,15 +67,18 @@ async fn launch() -> _ {
.expect("error creating CORS options");
rocket::build()
.mount("/", routes![
pages::assets::video_file,
pages::assets::video_thumbnail,
pages::index::search,
pages::index::channel_page,
pages::yt::yt_tags,
pages::yt::yt_tag_page,
pages::yt::yt_channel_page
])
.mount(
"/",
routes![
pages::assets::video_file,
pages::assets::video_thumbnail,
pages::index::search,
pages::index::channel_page,
pages::yt::yt_tags,
pages::yt::yt_tag_page,
pages::yt::yt_channel_page
],
)
.attach(cors)
.manage(lib)
}

View file

@@ -4,28 +4,28 @@ use crate::library::Library;
#[get("/video/raw?<v>")]
pub async fn video_file(v: &str, library: &State<Library>) -> Option<NamedFile> {
let (_, mut video) = if let Some((channel, video)) = library.get_video_by_hash(v).await {
(channel, video)
} else {
library.get_video_by_youtube_id(v).await.unwrap()
};
let video = if let Some(video) = library.get_video_by_id(v).await {
video
} else {
library.get_video_by_youtube_id(v).await.unwrap()
};
NamedFile::open(video.path().await?).await.ok()
NamedFile::open(video.path).await.ok()
}
#[get("/video/thumbnail?<v>")]
pub async fn video_thumbnail(v: &str, library: &State<Library>) -> Option<NamedFile> {
let (_, mut video) = if let Some((channel, video)) = library.get_video_by_hash(v).await {
(channel, video)
} else {
library.get_video_by_youtube_id(v).await.unwrap()
};
let video = if let Some(video) = library.get_video_by_id(v).await {
video
} else {
library.get_video_by_youtube_id(v).await.unwrap()
};
let path = video.path().await.unwrap();
let parent = path.parent().unwrap();
let thumbnail_path = path.file_stem().unwrap().to_str().unwrap();
let thumbnail_path = parent.join(thumbnail_path);
let thumbnail_path = thumbnail_path.to_str().unwrap();
let path = std::path::Path::new(&video.path);
let parent = path.parent().unwrap();
let thumbnail_path = path.file_stem().unwrap().to_str().unwrap();
let thumbnail_path = parent.join(thumbnail_path);
let thumbnail_path = thumbnail_path.to_str().unwrap();
NamedFile::open(format!("{thumbnail_path}.jpg")).await.ok()
NamedFile::open(format!("{thumbnail_path}.jpg")).await.ok()
}

View file

@@ -3,23 +3,27 @@ use serde_json::json;
use crate::library::Library;
use super::vec_to_api_video_mut;
use super::vec_to_api;
#[get("/search?<query>&<offset>")]
pub async fn search(query: &str, offset: Option<i64>, library: &State<Library>) -> Option<serde_json::Value> {
const NUM_OF_RESULTS: i64 = 20;
// get start parameter for search result chunks
let start = offset.unwrap_or(0);
let mut video_matches = library.search_video(query, start, NUM_OF_RESULTS).await;
pub async fn search(
query: &str,
offset: Option<i64>,
library: &State<Library>,
) -> Option<serde_json::Value> {
const NUM_OF_RESULTS: i64 = 20;
Some(json!(vec_to_api_video_mut(&mut video_matches).await))
// get start parameter for search result chunks
let start = offset.unwrap_or(0);
let mut video_matches = library.search_video(query, start, NUM_OF_RESULTS).await;
Some(json!(vec_to_api(&mut video_matches).await))
}
#[get("/d/<dir>")]
pub async fn channel_page(dir: &str, library: &State<Library>) -> Option<serde_json::Value> {
let mut dir_videos = library.get_directory_videos(dir).await;
Some(json!(vec_to_api_video_mut(&mut dir_videos).await))
Some(json!(vec_to_api(&mut dir_videos).await))
}

View file

@@ -18,13 +18,3 @@ pub async fn vec_to_api(items: &[impl ToAPI]) -> Vec<serde_json::Value> {
ret
}
pub async fn vec_to_api_video_mut(items: &mut [crate::library::Video<'_>]) -> Vec<serde_json::Value> {
let mut ret = Vec::with_capacity(items.len());
for e in items {
ret.push(e.api().await);
}
ret
}

View file

@@ -1,7 +1,7 @@
use rocket::{get, State};
use serde_json::json;
use crate::{library::Library, pages::vec_to_api_video_mut};
use crate::{library::Library, pages::vec_to_api};
#[get("/yt/tags")]
pub async fn yt_tags(library: &State<Library>) -> serde_json::Value {
@@ -13,11 +13,11 @@ pub async fn yt_tags(library: &State<Library>) -> serde_json::Value {
pub async fn yt_tag_page(tag: &str, library: &State<Library>) -> serde_json::Value {
let mut channel_videos = library.get_videos_by_tag_yt(&tag).await;
json!(vec_to_api_video_mut(&mut channel_videos).await)
json!(vec_to_api(&mut channel_videos).await)
}
#[get("/yt/c/<channel>")]
pub async fn yt_channel_page(channel: &str, library: &State<Library>) -> serde_json::Value {
let mut channel_videos = library.get_channel_videos_yt(&channel).await;
json!(vec_to_api_video_mut(&mut channel_videos).await)
json!(vec_to_api(&mut channel_videos).await)
}