update
This commit is contained in:
parent 1979fc246e
commit 224bdf77ab
9 changed files with 207 additions and 367 deletions
@@ -1,6 +1,3 @@
use ring::digest::{Context, SHA256};
use std::io::Read;

pub fn is_video_file(filename: &str) -> bool {
    let video_extensions = vec!["mp4", "mkv", "webm"];

@@ -11,21 +8,3 @@ pub fn is_video_file(filename: &str) -> bool {
        }
    false
}

pub fn calculate_sha256_hash(file_path: &str) -> std::io::Result<String> {
    log::info!("Hashing {file_path}");
    let mut context = Context::new(&SHA256);
    let mut file = std::fs::File::open(file_path)?;

    let mut buffer = [0u8; 4096];
    loop {
        let n = file.read(&mut buffer)?;
        if n == 0 {
            break;
        }
        context.update(&buffer[..n]);
    }

    let digest = context.finish();
    Ok(hex::encode(digest.as_ref()))
}

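For orientation, a minimal sketch of how the two helpers in the hunk above might be called. The `main` wrapper and the sample path are illustrative assumptions; only `is_video_file` and `calculate_sha256_hash` come from the code shown.

// Hypothetical call site, not part of the commit.
fn main() -> std::io::Result<()> {
    let path = "/tmp/example.mp4"; // assumed sample path

    if is_video_file(path) {
        // Streams the file in 4096-byte chunks and returns the SHA-256
        // digest as a hex string.
        let hash = calculate_sha256_hash(path)?;
        println!("{path} {hash}");
    }
    Ok(())
}
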
@@ -42,21 +42,11 @@ impl Library {
    }

    pub async fn get_directory_videos(&self, dir: &str) -> Vec<Video> {
        let videos_ids: Vec<(String, )> = sqlx::query_as(
            "SELECT sha256 FROM videos INNER JOIN youtube_meta ON youtube_meta.id = videos.youtube_id WHERE directory = ?1 ORDER BY youtube_meta.upload_date DESC;")
        sqlx::query_as("SELECT * FROM videos WHERE directory = ?1")
            .bind(dir)
            .fetch_all(&self.conn).await.unwrap();

        let mut videos: Vec<Video<'_>> = Vec::new();

        for video in videos_ids {
            videos.push(Video::from_hash(
                &video.0,
                &self.conn
            ));
        }

        videos
            .fetch_all(&self.conn)
            .await
            .unwrap()
    }

    // YT

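The refactor above replaces the "fetch ids, then build each value with Video::from_hash" loop with a single sqlx::query_as call that decodes rows directly. A minimal sketch of that pattern follows; the struct fields, table columns, and pool argument here are illustrative assumptions, not the project's actual schema (the real Video struct appears later in this diff).

// Sketch of the query_as + FromRow pattern the new code relies on.
use sqlx::prelude::FromRow;

#[derive(Debug, FromRow)]
struct Row {
    id: uuid::Uuid, // assumes sqlx's "uuid" feature
    title: String,
}

async fn rows_for_directory(pool: &sqlx::PgPool, dir: &str) -> Vec<Row> {
    // Each result row is decoded straight into Row, so no manual
    // id-fetching loop is needed.
    sqlx::query_as("SELECT id, title FROM videos WHERE directory = $1")
        .bind(dir)
        .fetch_all(pool)
        .await
        .unwrap()
}
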
@@ -81,95 +71,43 @@ videos.push(Video::from_hash(
    }

    pub async fn get_videos_by_tag_yt(&self, tag: &str) -> Vec<Video> {
        let videos_ids: Vec<(String, )> = sqlx::query_as(
            "SELECT sha256 FROM youtube_meta_tags INNER JOIN youtube_meta ON youtube_meta_tags.youtube_id = youtube_meta.id INNER JOIN videos ON videos.youtube_id = youtube_meta.id WHERE tag = $1;")
        sqlx::query_as(
            "SELECT * FROM youtube_meta_tags INNER JOIN youtube_meta ON youtube_meta_tags.youtube_id = youtube_meta.id INNER JOIN videos ON videos.youtube_id = youtube_meta.id WHERE tag = $1")
            .bind(tag)
            .fetch_all(&self.conn).await.unwrap();

        let mut videos: Vec<Video<'_>> = Vec::new();

        for video in videos_ids {
            videos.push(Video::from_hash(
                &video.0,
                &self.conn
            ));
        }

        videos
            .fetch_all(&self.conn).await.unwrap()
    }

    pub async fn get_channel_videos_yt(&self, id: &str) -> Vec<Video> {
        let videos_ids: Vec<(String, )> = sqlx::query_as(
            "SELECT sha256 FROM youtube_meta INNER JOIN videos ON youtube_meta.id = videos.youtube_id WHERE uploader_id = $1 ORDER BY youtube_meta.upload_date DESC;")
        sqlx::query_as(
            "SELECT * FROM youtube_meta INNER JOIN videos ON youtube_meta.id = videos.youtube_id WHERE uploader_id = $1 ORDER BY youtube_meta.upload_date DESC;")
            .bind(id)
            .fetch_all(&self.conn).await.unwrap();

        let mut videos: Vec<Video<'_>> = Vec::new();

        for video in videos_ids {
            videos.push(Video::from_hash(
                &video.0,
                &self.conn
            ));
        }

        videos
            .fetch_all(&self.conn).await.unwrap()
    }

    // videos

    pub async fn get_random_videos(&self, n: i64) -> Vec<Video> {
        let videos_ids: Vec<(String, )> = sqlx::query_as(
            "SELECT sha256 FROM videos ORDER BY RANDOM() LIMIT $1;")
        sqlx::query_as("SELECT * FROM videos ORDER BY RANDOM() LIMIT $1;")
            .bind(n)
            .fetch_all(&self.conn).await.unwrap();

        let mut videos: Vec<Video<'_>> = Vec::new();

        for video in videos_ids {
            videos.push(Video::from_hash(
                &video.0,
                &self.conn
            ));
        }

        videos
            .fetch_all(&self.conn)
            .await
            .unwrap()
    }

    pub async fn get_video_by_hash(&self, hash: &str) -> Option<(String, Video)> {
        let res: Vec<(String, Video)> = sqlx::query_as::<sqlx::Postgres, (String, String)>(
            "SELECT sha256, directory FROM videos WHERE sha256 = $1"
        )
        .bind(hash)
        .fetch_all(&self.conn).await.unwrap().into_iter()
        .map(|x| {
            (x.1, Video::from_hash(&x.0, &self.conn))
        }).collect();

        if !res.is_empty() {
            return res.first().map(std::borrow::ToOwned::to_owned);
        }

        None
    }

    pub async fn get_video_by_youtube_id(&self, id: &str) -> Option<(String, Video)> {
        let res: Vec<(String, Video<'_>)> = sqlx::query_as(
            "SELECT sha256, directory FROM videos WHERE youtube_id = $1")
    pub async fn get_video_by_id(&self, id: &str) -> Option<Video> {
        sqlx::query_as("SELECT * FROM videos WHERE id = $1")
            .bind(id)
            .fetch_all(&self.conn).await.unwrap().into_iter().map(|x: (String, String)| {
                (
                    x.1,
                    Video::from_hash(&x.0, &self.conn),
                )
            }).collect();
            .fetch_optional(&self.conn)
            .await
            .unwrap()
    }

        if !res.is_empty() {
            return res.first().map(std::borrow::ToOwned::to_owned);
        }

        None
    pub async fn get_video_by_youtube_id(&self, id: &str) -> Option<Video> {
        sqlx::query_as("SELECT * FROM videos WHERE youtube_id = $1")
            .bind(id)
            .fetch_optional(&self.conn)
            .await
            .unwrap()
    }

    pub async fn has_path(&self, path: &Path) -> bool {

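The new single-row lookups above switch from fetch_all plus manual first-element handling to fetch_optional, which yields an Option directly. A small sketch of that shape, assuming a `videos` table with `id` and `title` columns and a pool passed in for illustration:

// Sketch of the fetch_optional pattern used by the new by-id lookups.
async fn video_title(pool: &sqlx::PgPool, id: uuid::Uuid) -> Option<String> {
    // fetch_optional returns Ok(None) when no row matches, so the caller
    // gets an Option instead of an empty Vec that must be inspected.
    let row: Option<(String,)> = sqlx::query_as("SELECT title FROM videos WHERE id = $1")
        .bind(id)
        .fetch_optional(pool)
        .await
        .unwrap();

    row.map(|(title,)| title)
}
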
@@ -186,9 +124,9 @@ videos.push(Video::from_hash(
    pub async fn search_video(&self, query: &str, start: i64, n: i64) -> Vec<Video> {
        let query = format!("%{query}%");

        sqlx::query_as(
        let ids: Vec<(uuid::Uuid,)> = sqlx::query_as(
            r#"SELECT DISTINCT
                vm.sha256,
                vm.id,
                ( -- Calculate a score for the video based on matches
                    (ym.title LIKE $1) +
                    (ym.description LIKE $1) +

@@ -209,17 +147,22 @@ videos.push(Video::from_hash(
                    (ymt.tag LIKE $1)
                ORDER BY
                    score DESC,
                    ym.upload_date DESC LIMIT $2 OFFSET $3;"#
                    ym.upload_date DESC LIMIT $2 OFFSET $3;"#,
        )
        .bind(query)
        .bind(n)
        .bind(start)
        .fetch_all(&self.conn).await.unwrap().into_iter().map(|x: (String, i64)| {
            Video::from_hash(
                &x.0,
                &self.conn
            )
        }).collect()
        .fetch_all(&self.conn)
        .await
        .unwrap();

        let mut videos = Vec::new();

        for id in ids {
            videos.push(Video::get(&id.0).await.unwrap());
        }

        videos
    }
}

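A possible call site for the search shown above; `library` stands in for however the application constructs its Library value, so this is only a usage sketch against the signature in the diff.

// Hypothetical caller of Library::search_video (not part of the commit).
async fn first_page(library: &Library) -> Vec<Video> {
    // The "%term%" LIKE pattern is built inside search_video via
    // format!("%{query}%"), so the caller passes the raw term plus the
    // paging offset (start) and page size (n).
    library.search_video("rust", 0, 20).await
}
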
@@ -230,7 +173,7 @@ impl Library {
        let lib = self.get_video_paths(dir).await;

        for path in lib {
            Video::insert_path_to_db(&self.conn, &path).await;
            Video::insert_path_to_db(&path).await;
        }

        log::info!("Finished scanning {dir:?}");

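The new code in the next file resolves its connection through a `get_pg!()` macro instead of threading `&sqlx::PgPool` through every caller. The macro itself is not part of this diff; a common shape it might take, stated purely as an assumption, is a process-wide pool initialized once:

// Guessed shape of get_pg!(); the real definition is not shown in this commit.
use std::sync::OnceLock;

static PG: OnceLock<sqlx::PgPool> = OnceLock::new();

// Hypothetical initializer run once at startup.
async fn init_pg(url: &str) {
    let pool = sqlx::PgPool::connect(url).await.unwrap();
    let _ = PG.set(pool);
}

// Call sites can then write .fetch_all(get_pg!()) without a pool parameter.
macro_rules! get_pg {
    () => {
        PG.get().expect("postgres pool not initialized")
    };
}
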
@@ -1,165 +1,76 @@
use std::path::PathBuf;
use crate::{get_pg, pages::ToAPI, yt_meta};
use serde::{Deserialize, Serialize};
use serde_json::json;
use sqlx::prelude::FromRow;
use std::path::PathBuf;

use crate::library::func::calculate_sha256_hash;
use crate::yt_meta;

// todo : optimize

#[derive(Debug, Default, Clone)]
pub struct Video<'a> {
    directory: Option<String>,
    path: Option<PathBuf>,
    title: Option<String>,
    hash: String,
    youtube_id: Option<String>,
    description: Option<String>,
    uploader_name: Option<String>,
    uploader_id: Option<String>,
    duration: Option<i64>,
    views: Option<i64>,
    categories: Option<Vec<String>>,
    tags: Option<Vec<String>>,
    upload_date: Option<chrono::NaiveDate>,
    db: Option<&'a sqlx::PgPool>,
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct YouTubeMeta {
    pub id: String,
    pub title: String,
    pub uploader_name: String,
    pub uploader_id: String,
    pub duration: i64,
    pub views: i64,
    pub upload_date: chrono::NaiveDate,
}

// Video properties
impl<'a> Video<'a> {
    async fn get_video_info(&mut self) {
        log::info!("Fetching Video Metadata for {}", self.hash);

        let res: Vec<(String, String)> = sqlx::query_as(
            "SELECT title, path FROM videos WHERE sha256 = $1")
            .bind(&self.hash)
            .fetch_all(&*self.db.unwrap()).await.unwrap();

        let res = res.first().unwrap();

        self.title = Some(res.0.clone());
        self.path = Some(std::path::Path::new(&res.1).to_path_buf());
impl YouTubeMeta {
    pub async fn get(id: &str) -> Option<Self> {
        sqlx::query_as("SELECT * FROM youtube_meta WHERE id = $1")
            .bind(id)
            .fetch_optional(get_pg!())
            .await
            .unwrap()
    }

    async fn get_youtube_meta_info(&mut self) {
        log::info!("Fetching YouTube Metadata for {}", self.hash);
    pub async fn tags(&self) -> Vec<String> {
        let res: Vec<(String,)> =
            sqlx::query_as("SELECT tag FROM youtube_meta_tags WHERE youtube_id = $1")
                .bind(&self.id)
                .fetch_all(get_pg!())
                .await
                .unwrap();

        let res: Vec<(String, String, String, String, chrono::NaiveDate, i64)> = sqlx::query_as(
            "SELECT id, description, uploader_name, uploader_id, upload_date, views FROM youtube_meta WHERE id = (SELECT youtube_id FROM videos WHERE sha256 = $1 LIMIT 1)")
            .bind(&self.hash)
            .fetch_all(&**self.db.as_ref().unwrap()).await.unwrap();

        if let Some(res) = res.first() {
            self.youtube_id = Some(res.0.clone());
            self.description = Some(res.1.clone());
            self.uploader_name = Some(res.2.clone());
            self.uploader_id = Some(res.3.clone());
            self.upload_date = Some(res.4.clone());
            self.views = Some(res.5);

            let res: Vec<(String,)> = sqlx::query_as(
                "SELECT category FROM youtube_meta_categories WHERE youtube_id = $1")
                .bind(self.youtube_id.as_ref().unwrap())
                .fetch_all(&**self.db.as_ref().unwrap()).await.unwrap();

            self.categories = Some(res.into_iter().map(|x| x.0).collect());

            let res: Vec<(String,)> = sqlx::query_as(
                "SELECT tag FROM youtube_meta_tags WHERE youtube_id = $1")
                .bind(self.youtube_id.as_ref().unwrap())
                .fetch_all(&**self.db.as_ref().unwrap()).await.unwrap();

            self.tags = Some(res.into_iter().map(|x| x.0).collect());
        }
        res.into_iter().map(|x| x.0).collect()
    }

    pub async fn title(&mut self) -> Option<&str> {
        if self.title.is_none() {
            self.get_video_info().await;
        }
        self.title.as_deref()
    }
    pub async fn categories(&self) -> Vec<String> {
        let res: Vec<(String,)> =
            sqlx::query_as("SELECT category FROM youtube_meta_categories WHERE youtube_id = $1")
                .bind(&self.id)
                .fetch_all(get_pg!())
                .await
                .unwrap();

    pub async fn path(&mut self) -> Option<PathBuf> {
        if self.path.is_none() {
            self.get_video_info().await;
        }
        self.path.as_ref().map(std::clone::Clone::clone)
        res.into_iter().map(|x| x.0).collect()
    }
}

    pub async fn description(&mut self) -> Option<&str> {
        if self.description.is_none() {
            self.get_youtube_meta_info().await;
        }
        self.description.as_deref()
    }

    pub async fn views(&mut self) -> Option<i64> {
        if self.views.is_none() {
            self.get_youtube_meta_info().await;
        }
        self.views
    }

    pub async fn uploader_name(&mut self) -> Option<&str> {
        if self.uploader_name.is_none() {
            self.get_youtube_meta_info().await;
        }
        self.uploader_name.as_deref()
    }

    pub async fn uploader_id(&mut self) -> Option<&str> {
        if self.uploader_id.is_none() {
            self.get_youtube_meta_info().await;
        }
        self.uploader_id.as_deref()
    }

    pub async fn upload_date(&mut self) -> Option<chrono::NaiveDate> {
        if self.upload_date.is_none() {
            self.get_youtube_meta_info().await;
        }
        self.upload_date
    }

    pub async fn categories(&mut self) -> Option<&Vec<String>> {
        if self.categories.is_none() {
            self.get_youtube_meta_info().await;
        }
        self.categories.as_ref()
    }

    pub async fn tags(&mut self) -> Option<&Vec<String>> {
        if self.tags.is_none() {
            self.get_youtube_meta_info().await;
        }
        self.tags.as_ref()
    }

    pub async fn youtube_id(&mut self) -> Option<&str> {
        if self.youtube_id.is_none() {
            self.get_youtube_meta_info().await;
        }
        self.youtube_id.as_deref()
    }

    pub fn hash(&self) -> &str {
        &self.hash
    }
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct Video {
    pub id: uuid::Uuid,
    pub directory: String,
    pub path: String,
    pub duration: i64,
    pub title: String,
    youtube_id: Option<String>,
}

// Video Init
impl<'a> Video<'a> {
    pub fn from_hash(hash: &str, db: &'a sqlx::PgPool) -> Self {
        Self {
            hash: hash.to_owned(),
            db: Some(db),
            ..Default::default()
        }
impl Video {
    pub async fn get(id: &uuid::Uuid) -> Option<Self> {
        sqlx::query_as("SELECT * FROM videos WHERE id = $1")
            .bind(id)
            .fetch_optional(get_pg!())
            .await
            .unwrap()
    }

    pub async fn insert_path_to_db(db: &'a sqlx::PgPool, v: &PathBuf) -> Option<Self> {
    pub async fn insert_path_to_db(v: &PathBuf) -> Option<Self> {
        let db = get_pg!();

        log::info!("Add {v:?} to library");
        let id = calculate_sha256_hash(v.to_str().unwrap()).unwrap();
        let file_name = v.file_stem().unwrap().to_str().unwrap().to_owned();
        let dir = v
            .parent()

@@ -184,80 +95,70 @@ impl<'a> Video<'a> {
            .bind(&meta.upload_date())
            .execute(&mut *tx).await.unwrap();

            sqlx::query("INSERT INTO videos (sha256, directory, path, title, youtube_id) VALUES ($1, $2, $3, $4, $5)")
                .bind(&id)
            let vid = sqlx::query_as("INSERT INTO videos (directory, path, title, youtube_id) VALUES ($1, $2, $3, $4, $5)")
                .bind(&dir)
                .bind(v.to_str().unwrap())
                .bind(meta.title())
                .bind(meta.youtube_id().unwrap())
                .execute(&mut *tx).await.unwrap();
                .fetch_one(&mut *tx).await.unwrap();

            for cat in meta.categories().unwrap() {
                sqlx::query("INSERT INTO youtube_meta_categories (youtube_id, category) VALUES ($1, $2)")
                    .bind(meta.youtube_id().unwrap()).bind(cat).execute(&mut *tx).await.unwrap();
                sqlx::query(
                    "INSERT INTO youtube_meta_categories (youtube_id, category) VALUES ($1, $2)",
                )
                .bind(meta.youtube_id().unwrap())
                .bind(cat)
                .execute(&mut *tx)
                .await
                .unwrap();
            }

            if let Some(tags) = meta.tags() {
                for tag in tags {
                    sqlx::query(
                        "INSERT INTO youtube_meta_tags (youtube_id, tag) VALUES ($1, $2)")
                        .bind(&meta.youtube_id().unwrap()).bind(&tag).execute(&mut *tx).await.unwrap();
                    sqlx::query("INSERT INTO youtube_meta_tags (youtube_id, tag) VALUES ($1, $2)")
                        .bind(&meta.youtube_id().unwrap())
                        .bind(&tag)
                        .execute(&mut *tx)
                        .await
                        .unwrap();
                }
            }

            tx.commit().await.unwrap();

            return Some(Self {
                directory: Some(dir),
                path: Some(v.to_owned()),
                title: Some(meta.title()),
                hash: id,
                youtube_id: meta.youtube_id(),
                description: meta.description(),
                uploader_name: meta.uploader_name(),
                uploader_id: meta.uploader_id(),
                duration: meta.duration(),
                views: meta.views(),
                categories: meta.categories(),
                tags: meta.tags(),
                upload_date: meta.upload_date(),
                db: Some(db),
            });
            return Some(vid);
        }

        sqlx::query(
            "INSERT INTO videos (sha256, directory, path, title) VALUES ($1, $2, $3, $4)")
            .bind(&id).bind(dir).bind(v.to_str().unwrap()).bind(file_name).execute(&mut *tx).await.unwrap();
        let vid = sqlx::query_as(
            "INSERT INTO videos (directory, path, title) VALUES ($1, $2, $3, $4) RETURNING *",
        )
        .bind(dir)
        .bind(v.to_str().unwrap())
        .bind(file_name)
        .fetch_one(&mut *tx)
        .await
        .unwrap();

        tx.commit().await.unwrap();

        Some(Self {
            path: Some(v.to_owned()),
            title: Some(v.file_stem().unwrap().to_str().unwrap().to_owned()),
            hash: id,
            db: Some(db),
            ..Self::default()
        })
        Some(vid)
    }
}

impl Video<'_> {
    pub async fn api(&mut self) -> serde_json::Value {
        self.get_video_info().await;
        self.get_youtube_meta_info().await;
impl ToAPI for Video {
    async fn api(&self) -> serde_json::Value {
        let yt_meta = if let Some(yt_meta) = &self.youtube_id {
            YouTubeMeta::get(yt_meta).await
        } else {
            None
        };

        json!({
            "directory": self.directory.as_ref().unwrap(),
            "title": self.title.as_ref().unwrap(),
            "hash": self.hash,
            "youtube_id": self.youtube_id,
            "uploader_name": self.uploader_name,
            "uploader_id": self.uploader_id,
            "id": &self.id,
            "directory": self.directory,
            "duration": self.duration,
            "views": self.views,
            "categories": self.categories,
            "tags": self.tags,
            "upload_date": self.upload_date,
            "title": self.title,
            "yt": yt_meta
        })
    }
}
}

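The `ToAPI` trait implemented above is imported from `crate::pages` and its definition is not part of this diff. A plausible minimal shape, stated purely as an assumption from how it is used here, is:

// Assumed shape of the ToAPI trait; the real definition lives in
// crate::pages and is not shown in this commit.
pub trait ToAPI {
    // async fn in traits is stable on recent Rust; api() returns the JSON
    // value a handler can serialize directly.
    async fn api(&self) -> serde_json::Value;
}

// Hypothetical handler-side use:
// let body = video.api().await;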