This commit is contained in:
parent 609d520986
commit 7526280223

7 changed files with 116 additions and 29 deletions
Cargo.lock (generated, 2 changes)
@@ -164,7 +164,7 @@ checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b"
 [[package]]
 name = "based"
 version = "0.1.0"
-source = "git+https://git.hydrar.de/jmarya/based#d6555edc29de66ff5190b716a1f8ebac8dbb2110"
+source = "git+https://git.hydrar.de/jmarya/based#00bb6f152d758252d62a511705ef35c8aa118168"
 dependencies = [
  "bcrypt",
  "chrono",
Cargo.toml (2 changes)

@@ -16,7 +16,7 @@ tokio = { version = "1.35.1", features = ["full"] }
 uuid = { version = "1.8.0", features = ["v4", "serde"] }
 sqlx = { version = "0.8", features = ["postgres", "runtime-tokio-native-tls", "derive", "uuid", "chrono", "json"] }
 maud = "0.26.0"
-based = { git = "https://git.hydrar.de/jmarya/based", features = [] }
+based = { git = "https://git.hydrar.de/jmarya/based", features = ["htmx"] }
 url = "2.5.4"
 reqwest = "0.12.11"
 ollama-rs = "0.2.2"
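Note: the `based` dependency now enables its `htmx` feature. Later hunks in this commit mount `based::htmx::htmx_script_route` and load `/assets/htmx.min.js` from `render_page`; a minimal sketch of that wiring, assuming only what those hunks show (everything else here is illustrative):

```rust
// Sketch only: mirrors the route mount added later in this commit.
// based::htmx::htmx_script_route is assumed to serve the htmx script
// that render_page references as /assets/htmx.min.js.
#[rocket::launch]
async fn launch() -> _ {
    rocket::build().mount("/", rocket::routes![based::htmx::htmx_script_route])
}
```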
src/ai.rs (26 changes)
@@ -1,8 +1,9 @@
 use std::collections::VecDeque;
 
-use based::get_pg;
+use based::{get_pg, request::api::ToAPI};
 use ollama_rs::generation::embeddings::request::{EmbeddingsInput, GenerateEmbeddingsRequest};
 use serde::Serialize;
+use serde_json::json;
 use sqlx::FromRow;
 
 use crate::archive::{Document, Domain, WebsiteArchive};

@@ -20,6 +21,17 @@ pub struct DocEmbedding {
     pub similarity: f64,
 }
 
+impl ToAPI for DocEmbedding {
+    async fn api(&self) -> serde_json::Value {
+        json!({
+            "domain": self.domain,
+            "path": self.path,
+            "ver": self.ver,
+            "similarity": self.similarity
+        })
+    }
+}
+
 pub trait Embedding {
     fn embedding(&self, ver: Option<String>)
     -> impl std::future::Future<Output = Option<Vec<f32>>>;
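Note: `ToAPI` gives `DocEmbedding` a JSON representation. A sketch of the object `DocEmbedding::api` produces (the keys come from the impl above; the values are invented for illustration):

```rust
// Illustrative only: the JSON shape produced by DocEmbedding::api().
// Keys are taken from the impl above; the values here are made up.
fn main() {
    let example = serde_json::json!({
        "domain": "example.com",
        "path": "docs/index.html",
        "ver": "2025-01-01",
        "similarity": 0.87
    });
    assert_eq!(example["similarity"], 0.87);
}
```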
@@ -41,7 +53,7 @@ impl Embedding for Document {
     }
 }
 
-pub async fn generate_embedding(input: String) -> Option<Vec<f32>> {
+pub async fn generate_embedding(mut input: String) -> Option<Vec<f32>> {
     if let Ok(ollama_url) = std::env::var("OLLAMA_URL") {
         let (host, port) = ollama_url.split_once(':')?;
         let ollama = ollama_rs::Ollama::new(format!("http://{host}"), port.parse().ok()?);

@@ -59,6 +71,10 @@ pub async fn generate_embedding(input: String) -> Option<Vec<f32>> {
         .ok()?;
     }
 
+    if input.is_empty() {
+        input = " ".to_string();
+    }
+
     let res = ollama
         .generate_embeddings(GenerateEmbeddingsRequest::new(
             "mxbai-embed-large".to_string(),

@@ -129,11 +145,13 @@ impl EmbedStore {
         }
     }
 
-    pub async fn search_vector(v: &pgvector::Vector) -> Vec<DocEmbedding> {
+    pub async fn search_vector(v: &pgvector::Vector, limit: i64, offset: i64) -> Vec<DocEmbedding> {
         sqlx::query_as(
-            "SELECT *, 1 / (1 + (embed_mxbai_embed_large <-> $1)) AS similarity FROM doc_embedding ORDER BY embed_mxbai_embed_large <-> $1 LIMIT 5",
+            "SELECT *, 1 / (1 + (embed_mxbai_embed_large <-> $1)) AS similarity FROM doc_embedding ORDER BY embed_mxbai_embed_large <-> $1 LIMIT $2 OFFSET $3",
         )
         .bind(v)
+        .bind(limit)
+        .bind(offset)
         .fetch_all(get_pg!())
         .await
         .unwrap()
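Note: `search_vector` drops the hard-coded `LIMIT 5` in favor of bound `LIMIT`/`OFFSET` parameters, which is what makes the paged search route possible. A sketch of the conventional page-to-window arithmetic, assuming 1-based pages and 5 results per page as used later in this commit (the real mapping is done inside `based::request::api::GeneratedPager`):

```rust
// Conventional 1-based page -> (limit, offset) mapping for a LIMIT/OFFSET
// query like the one above. This is only an illustration of the arithmetic;
// the project delegates it to GeneratedPager.
fn page_window(page: u64, per_page: u64) -> (i64, i64) {
    let limit = per_page as i64;
    let offset = (page.saturating_sub(1) * per_page) as i64;
    (limit, offset)
}

fn main() {
    assert_eq!(page_window(1, 5), (5, 0));
    assert_eq!(page_window(3, 5), (5, 10));
}
```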
@@ -151,7 +151,11 @@ impl Document {
     pub fn new(domain: &str, path: &str, base_dir: PathBuf) -> Self {
         Self {
             domain: domain.to_string(),
-            path: path.to_string(),
+            path: path
+                .split('/')
+                .filter(|x| !x.is_empty())
+                .collect::<Vec<&str>>()
+                .join("/"),
             base_dir,
         }
     }
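Note: the constructor now normalizes the stored path by dropping empty segments, so leading, trailing, and doubled slashes collapse. A standalone sketch of just that expression (the helper name is mine; the body is copied from the hunk above):

```rust
// Same split/filter/join normalization as in Document::new above,
// pulled out into a free function for illustration.
fn normalize_path(path: &str) -> String {
    path.split('/')
        .filter(|x| !x.is_empty())
        .collect::<Vec<&str>>()
        .join("/")
}

fn main() {
    assert_eq!(normalize_path("/blog//2024/post/"), "blog/2024/post");
    assert_eq!(normalize_path("index.html"), "index.html");
}
```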
@@ -34,6 +34,7 @@ async fn launch() -> _ {
         .mount(
             "/",
             routes![
+                based::htmx::htmx_script_route,
                 pages::index,
                 pages::render_website,
                 pages::domain_info_route,
@@ -59,10 +59,18 @@ pub fn gen_path_link(
 ///
 /// # Returns
 /// A `PreEscaped<String>` containing the HTML markup for the path header.
-pub fn gen_path_header(path_seperations: Vec<&str>, domain: &str) -> PreEscaped<String> {
+pub fn gen_path_header(
+    path_seperations: Vec<&str>,
+    domain: &str,
+    link: bool,
+) -> PreEscaped<String> {
     html! {
         @for (index, path) in path_seperations.iter().enumerate() {
+            @if link {
                 (gen_path_link(path, index, &path_seperations, domain))
+            } @else {
+                p { (path) }
+            }
             @if index < path_seperations.len()-1 {
                 (slash_seperator())
             };
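Note: callers now choose between clickable breadcrumbs (`link: true`) and plain text segments (`link: false`, used by the search results). A self-contained sketch of that toggle pattern, with `gen_path_link` and `slash_seperator` replaced by trivial stand-ins for illustration:

```rust
// Sketch of the link/plain toggle used by gen_path_header; the real
// gen_path_link and slash_seperator helpers are replaced by stand-ins.
use maud::{html, PreEscaped};

fn path_header(segments: Vec<&str>, link: bool) -> PreEscaped<String> {
    html! {
        @for (index, seg) in segments.iter().enumerate() {
            @if link {
                a href=(format!("/{}", segments[..=index].join("/"))) { (seg) }
            } @else {
                p { (seg) }
            }
            @if index < segments.len() - 1 {
                " / "
            }
        }
    }
}

fn main() {
    println!("{}", path_header(vec!["docs", "guide"], true).into_string());
    println!("{}", path_header(vec!["docs", "guide"], false).into_string());
}
```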
@@ -79,6 +87,7 @@ pub async fn render_page(content: PreEscaped<String>, ctx: RequestContext) -> St
         html! {
             script src="https://cdn.tailwindcss.com" {};
             meta name="viewport" content="width=device-width, initial-scale=1.0" {};
+            script src="/assets/htmx.min.js" {};
         },
         html! {},
         Some("bg-zinc-950 text-white min-h-screen flex pt-8 justify-center".to_string()),
@@ -1,7 +1,12 @@
 use std::{io::Read, path::PathBuf};
 
-use based::request::{assets::DataResponse, respond_json, RequestContext, StringResponse};
-use maud::html;
+use based::{
+    page::search::Search,
+    request::{
+        api::GeneratedPager, assets::DataResponse, respond_json, RequestContext, StringResponse,
+    },
+};
+use maud::{html, PreEscaped};
 use rocket::{get, State};
 
 pub mod component;

@@ -9,10 +14,12 @@ use component::*;
 use serde_json::json;
 
 use crate::{
-    ai::{generate_embedding, EmbedStore},
+    ai::{generate_embedding, DocEmbedding, EmbedStore},
     archive::WebsiteArchive,
 };
 
+const SEARCH_BAR_STYLE: &'static str = "w-full px-4 mb-4 py-2 text-white bg-black border-2 border-neon-blue placeholder-neon-blue focus:ring-2 focus:ring-neon-pink focus:outline-none font-mono text-lg";
+
 /// Get the favicon of a domain
 #[get("/favicon/<domain>")]
 pub async fn favicon_route(domain: &str) -> Option<DataResponse> {

@@ -36,9 +43,17 @@ pub async fn index(ctx: RequestContext, arc: &State<WebsiteArchive>) -> StringRe
 
     let content = html! {
         div class="container mx-auto p-4" {
+
+            div class="mb-4" {
+                input type="search" name="query" placeholder="Search..." class=(SEARCH_BAR_STYLE)
+                    hx-get=("/vector_search")
+                    hx-target="#website_grid" hx-push-url="true" hx-swap="outerHTML" {};
+            };
+
+            div id="website_grid" {
             h1 class="text-5xl font-bold text-center mb-10" { "Websites" };
             div class="grid grid-cols-2 sm:grid-cols-3 lg:grid-cols-5 xl:grid-cols-6 2xl:grid-cols-8 gap-6" {
 
                 @for site in websites {
                     a href=(format!("/d/{site}")) class="bg-neutral-900 shadow-md rounded-lg hover:bg-neutral-800 bg-gray-1 hover:cursor-pointer transition-all duration-300 flex flex-col items-center justify-center aspect-square max-w-60" {
                         div class="bg-blue-500 text-white rounded-full p-4" {

@@ -48,6 +63,7 @@ pub async fn index(ctx: RequestContext, arc: &State<WebsiteArchive>) -> StringRe
                     };
                 };
             };
+            };
         }
     };
 
@@ -75,7 +91,7 @@ pub async fn domain_info_route(
             img class="p-2" src=(format!("/favicon/{}", &domain.name)) {};
             a href=(format!("/d/{}", &domain.name)) { (domain.name) };
             (slash_seperator())
-            (gen_path_header(path_seperations, &domain.name))
+            (gen_path_header(path_seperations, &domain.name, true))
         };
 
         @if !versions.is_empty() {
@@ -160,27 +176,66 @@ pub async fn render_website(
     None
 }
 
-#[get("/vector_search?<query>")]
-pub async fn vector_search(query: &str) -> Option<StringResponse> {
+pub fn gen_search_element(x: &DocEmbedding) -> PreEscaped<String> {
+    html! {
+        div class="text-xl font-bold mt-4 p-4 flex items-center w-full max-w-4xl max-h-40 mx-auto bg-neutral-800 shadow-md rounded-lg overflow-hidden border border-neutral-900 hover:cursor-pointer"
+            hx-get=(format!("/d/{}/{}", x.domain, x.path))
+            hx-target="#main_content" hx-push-url="true" hx-swap="innerHTML"
+        {
+            img class="p-2" src=(format!("/favicon/{}", &x.domain));
+            a { (x.domain) };
+            (slash_seperator());
+            (gen_path_header(x.path.split('/').collect(), &x.domain, false));
+            p class="font-bold p-2 text-stone-400" { (format!("{:.2} %", x.similarity * 100.0)) };
+        };
+    }
+}
+
+#[get("/vector_search?<query>&<page>")]
+pub async fn vector_search(
+    query: Option<&str>,
+    page: Option<i64>,
+    ctx: RequestContext,
+) -> Option<StringResponse> {
     if std::env::var("OLLAMA_URL").is_err() {
         return None;
     }
 
-    if query.ends_with(".json") {
-        let query = query.trim_end_matches(".json");
-        let results = EmbedStore::search_vector(&pgvector::Vector::from(
-            generate_embedding(query.to_string()).await?,
-        ))
-        .await;
-        return Some(respond_json(&json!(&results)));
+    let page = page.unwrap_or(1);
+
+    // Search
+    let search =
+        Search::new("/vector_search".to_string()).search_class(SEARCH_BAR_STYLE.to_string());
+
+    if let Some(query) = query {
+        // If we have query
+        let real_query = query.trim_end_matches(".json");
+
+        // Search Results
+        let vector = pgvector::Vector::from(generate_embedding(real_query.to_string()).await?);
+
+        let results = GeneratedPager::new(
+            |input, offset, limit| {
+                Box::pin(async move {
+                    EmbedStore::search_vector(&input, limit as i64, offset as i64).await
+                })
+            },
+            5,
+        )
+        .pager(page as u64, vector)
+        .await;
+
+        // API Route
+        if query.ends_with(".json") {
+            return Some(respond_json(&json!(&results.page(page as u64))));
+        }
+
+        let content = search.build_response(&ctx, results, page, real_query, gen_search_element);
+
+        return Some(render_page(content, ctx).await);
     }
 
-    let results = EmbedStore::search_vector(&pgvector::Vector::from(
-        generate_embedding(query.to_string()).await?,
-    ))
-    .await;
-
-    // TODO : Implement Search UI with HTMX
-
-    None
+    // Return new search site
+    let content = search.build("", html! {});
+    Some(render_page(content, ctx).await)
 }
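Note: the reworked route accepts an optional query and page, falls back to a bare search page when no query is given, and serves JSON when the query ends in `.json`. A hedged client-side sketch using the `reqwest` dependency already in Cargo.toml (the base URL and the query value are assumptions; the `query`/`page` parameters and the `.json` convention come from the handler above):

```rust
// Illustrative client for the /vector_search route above.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let base = "http://localhost:8000"; // wherever the Rocket app listens

    // HTML search page, rendered through Search::build_response + render_page.
    let page = reqwest::get(format!("{base}/vector_search?query=rust&page=1"))
        .await?
        .text()
        .await?;
    println!("got {} bytes of HTML", page.len());

    // Same query as JSON: one page of (up to) 5 DocEmbedding results.
    let json = reqwest::get(format!("{base}/vector_search?query=rust.json&page=1"))
        .await?
        .text()
        .await?;
    println!("{json}");

    Ok(())
}
```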