add vector db

parent 6882f16e14
commit 609d520986

9 changed files with 547 additions and 38 deletions
src/ai.rs (new file, 179 lines)

@@ -0,0 +1,179 @@
use std::collections::VecDeque;

use based::get_pg;
use ollama_rs::generation::embeddings::request::{EmbeddingsInput, GenerateEmbeddingsRequest};
use serde::Serialize;
use sqlx::FromRow;

use crate::archive::{Document, Domain, WebsiteArchive};

#[derive(Debug, Clone, FromRow, Serialize)]
pub struct DocEmbedding {
    pub domain: String,
    pub path: String,
    pub ver: String,

    #[serde(skip)]
    embed_mxbai_embed_large: pgvector::Vector,

    #[sqlx(default)]
    pub similarity: f64,
}

pub trait Embedding {
    fn embedding(&self, ver: Option<String>)
        -> impl std::future::Future<Output = Option<Vec<f32>>>;
}

impl Embedding for Document {
    async fn embedding(&self, ver: Option<String>) -> Option<Vec<f32>> {
        let latest = "latest".to_string();
        log::info!(
            "Generating Vector embeddings for {} / {} @ {}",
            self.domain,
            self.path,
            ver.as_ref().unwrap_or(&latest)
        );

        let content_html = self.render_local(ver).await.unwrap();
        let content = html2md::parse_html(&content_html);
        generate_embedding(content).await
    }
}

pub async fn generate_embedding(input: String) -> Option<Vec<f32>> {
    if let Ok(ollama_url) = std::env::var("OLLAMA_URL") {
        let (host, port) = ollama_url.split_once(':')?;
        let ollama = ollama_rs::Ollama::new(format!("http://{host}"), port.parse().ok()?);

        let models = ollama.list_local_models().await.ok()?;

        if !models
            .into_iter()
            .any(|x| x.name.starts_with("mxbai-embed-large"))
        {
            log::info!("Model not found. Pulling 'mxbai-embed-large'");
            ollama
                .pull_model("mxbai-embed-large".to_string(), false)
                .await
                .ok()?;
        }

        let res = ollama
            .generate_embeddings(GenerateEmbeddingsRequest::new(
                "mxbai-embed-large".to_string(),
                EmbeddingsInput::Single(input),
            ))
            .await
            .ok()?;
        let embed = res.embeddings.first()?;
        return Some(embed.clone());
    }

    None
}

pub struct EmbedStore;

impl EmbedStore {
    pub async fn get_embedding(doc: &Document, ver: Option<&str>) -> Option<DocEmbedding> {
        let use_ver = ver.map_or_else(
            || {
                let version = doc.versions();
                version.first().unwrap().clone()
            },
            |x| x.to_string(),
        );
        sqlx::query_as("SELECT * FROM doc_embedding WHERE domain = $1 AND path = $2 AND ver = $3")
            .bind(&doc.domain)
            .bind(&doc.path)
            .bind(use_ver)
            .fetch_optional(get_pg!())
            .await
            .unwrap()
    }

    pub async fn embed_document(doc: &Document, ver: &str) {
        if let Some(embed) = doc.embedding(Some(ver.to_string())).await {
            let _ = sqlx::query(
                "DELETE FROM doc_embedding WHERE domain = $1 AND path = $2 AND ver = $3",
            )
            .bind(&doc.domain)
            .bind(&doc.path)
            .bind(ver)
            .execute(get_pg!())
            .await;

            sqlx::query("INSERT INTO doc_embedding VALUES ($1, $2, $3, $4)")
                .bind(&doc.domain)
                .bind(&doc.path)
                .bind(ver)
                .bind(embed)
                .execute(get_pg!())
                .await
                .unwrap();
        } else {
            log::warn!(
                "No embeds could be generated for {} / {}",
                doc.domain,
                doc.path
            );
        }
    }

    pub async fn ensure_embedding(doc: &Document) {
        for ver in doc.versions() {
            if Self::get_embedding(doc, Some(ver.as_str())).await.is_none() {
                Self::embed_document(doc, &ver).await;
            }
        }
    }

    pub async fn search_vector(v: &pgvector::Vector) -> Vec<DocEmbedding> {
        sqlx::query_as(
            "SELECT *, 1 / (1 + (embed_mxbai_embed_large <-> $1)) AS similarity FROM doc_embedding ORDER BY embed_mxbai_embed_large <-> $1 LIMIT 5",
        )
        .bind(v)
        .fetch_all(get_pg!())
        .await
        .unwrap()
    }

    pub async fn generate_embeddings_for(arc: &WebsiteArchive) {
        log::info!("Generating embeddings");

        for dom in arc.domains() {
            let dom = arc.get_domain(&dom);
            embed_path(&dom, "/").await;
        }

        log::info!("Done generating embeddings");
    }
}

pub async fn embed_path(dom: &Domain, path: &str) {
    let (paths, is_doc) = dom.paths(path);

    // If the path is a document, process the root path.
    if is_doc {
        let doc = dom.path("/");
        EmbedStore::ensure_embedding(&doc).await;
    }

    // Create a queue to process paths iteratively
    let mut queue = VecDeque::new();

    // Add the initial paths to the queue
    queue.extend(paths);

    while let Some(next_path) = queue.pop_front() {
        let (next_paths, is_doc) = dom.paths(next_path.path());

        if is_doc {
            let doc = dom.path(next_path.path());
            EmbedStore::ensure_embedding(&doc).await;
        }

        queue.extend(next_paths);
    }
}
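The module above assumes a `doc_embedding` table with columns (domain, path, ver, embed_mxbai_embed_large) backed by the Postgres pgvector extension; the migration creating it is among the changed files not shown in this excerpt. As a minimal sketch of how the new pieces compose, assuming OLLAMA_URL and DATABASE_URL are set and the migration has run (`demo_search` is a hypothetical helper, not part of the commit):

    // Hypothetical helper: embed a free-text query with generate_embedding(),
    // then rank stored documents by vector distance via search_vector().
    async fn demo_search(query: &str) -> Option<Vec<DocEmbedding>> {
        let embed = generate_embedding(query.to_string()).await?;
        Some(EmbedStore::search_vector(&pgvector::Vector::from(embed)).await)
    }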
@@ -214,7 +214,17 @@ impl Document {
     pub fn versions(&self) -> Vec<String> {
         let mut res: Vec<String> = read_dir(&self.doc_dir())
             .into_iter()
-            .filter(|x| x.starts_with("index_") && x.ends_with(".html"))
+            .filter_map(|x| {
+                if x.starts_with("index_") && x.ends_with(".html") {
+                    return Some(
+                        x.trim_start_matches("index_")
+                            .trim_end_matches(".html")
+                            .to_string(),
+                    );
+                }
+
+                None
+            })
             .collect();
         res.sort();
         res.reverse();
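With this change `versions()` strips the `index_` prefix and `.html` suffix itself (e.g. a hypothetical `index_2024-01-01.html` becomes `2024-01-01`) and returns the list sorted descending, newest first for timestamp-style names; that is what lets `domain_info_route` below drop its own trimming logic.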

src/main.rs (15 changed lines)

@@ -1,6 +1,9 @@
+use ai::EmbedStore;
 use archive::WebsiteArchive;
+use based::get_pg;
 use rocket::routes;

+mod ai;
 mod archive;
 mod blacklist;
 mod favicon;
@@ -12,6 +15,15 @@ async fn launch() -> _ {

     let arc = WebsiteArchive::new("./websites");

+    if std::env::var("DATABASE_URL").is_ok() {
+        let pg = get_pg!();
+        sqlx::migrate!("./migrations").run(pg).await.unwrap();
+    }
+
+    if std::env::var("OLLAMA_URL").is_ok() {
+        EmbedStore::generate_embeddings_for(&arc).await;
+    }
+
     let archive = arc.clone();

     tokio::spawn(async move {
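Note that `OLLAMA_URL` is read as a bare `host:port` pair with no scheme, since `generate_embedding` splits it on `:` and prepends `http://`; with a stock Ollama install that would be, for example, `OLLAMA_URL=localhost:11434`.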
@@ -25,7 +37,8 @@ async fn launch() -> _ {
             pages::index,
             pages::render_website,
             pages::domain_info_route,
-            pages::favicon_route
+            pages::favicon_route,
+            pages::vector_search
         ],
     )
     .manage(arc)

@@ -1,13 +1,17 @@
 use std::{io::Read, path::PathBuf};

-use based::request::{assets::DataResponse, RequestContext, StringResponse};
+use based::request::{assets::DataResponse, respond_json, RequestContext, StringResponse};
 use maud::html;
 use rocket::{get, State};

 pub mod component;
 use component::*;
+use serde_json::json;

-use crate::archive::WebsiteArchive;
+use crate::{
+    ai::{generate_embedding, EmbedStore},
+    archive::WebsiteArchive,
+};

 /// Get the favicon of a domain
 #[get("/favicon/<domain>")]
@@ -60,15 +64,7 @@ pub async fn domain_info_route(
 ) -> StringResponse {
     let domain = arc.get_domain(domain);
     let document = domain.path(paths.to_str().unwrap());
-    let versions: Vec<String> = document
-        .versions()
-        .into_iter()
-        .map(|x| {
-            x.trim_start_matches("index_")
-                .trim_end_matches(".html")
-                .to_string()
-        })
-        .collect();
+    let versions: Vec<String> = document.versions();
     let (path_entries, is_doc) = domain.paths(paths.to_str().unwrap());
     let path_seperations: Vec<&str> = paths.to_str().unwrap().split('/').collect();
@@ -163,3 +159,28 @@ pub async fn render_website(

     None
 }
+
+#[get("/vector_search?<query>")]
+pub async fn vector_search(query: &str) -> Option<StringResponse> {
+    if std::env::var("OLLAMA_URL").is_err() {
+        return None;
+    }
+
+    if query.ends_with(".json") {
+        let query = query.trim_end_matches(".json");
+        let results = EmbedStore::search_vector(&pgvector::Vector::from(
+            generate_embedding(query.to_string()).await?,
+        ))
+        .await;
+        return Some(respond_json(&json!(&results)));
+    }
+
+    let results = EmbedStore::search_vector(&pgvector::Vector::from(
+        generate_embedding(query.to_string()).await?,
+    ))
+    .await;
+
+    // TODO : Implement Search UI with HTMX
+
+    None
+}
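Appending `.json` to the query makes the new route return the serialized `DocEmbedding` results; the HTML variant is still a TODO. A quick way to exercise it, sketched as a hypothetical client (it assumes the server on Rocket's default `localhost:8000` plus the `tokio` and `reqwest` crates, none of which this commit pins down):

    // Hypothetical client, not part of the commit: the ".json" suffix
    // switches /vector_search from the (unfinished) HTML UI to JSON.
    #[tokio::main]
    async fn main() -> Result<(), reqwest::Error> {
        let body = reqwest::get("http://localhost:8000/vector_search?query=example.json")
            .await?
            .text()
            .await?;
        println!("{body}");
        Ok(())
    }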