JMARyA 2025-01-11 21:41:04 +01:00
parent 8a4e24ca20
commit f73a6cfd5c
Signed by: jmarya
GPG key ID: 901B2ADDF27C2263


@@ -1,3 +1,5 @@
+use std::time::Duration;
+
 use crate::{
     config::OllamaConfig,
     linux::{arch_chroot, systemd_service_enable},
@@ -19,11 +21,13 @@ pub fn setup_ollama(conf: &OllamaConfig) {
     let mut ollama_server = std::process::Command::new("arch-chroot")
         .arg("/mnt")
-        .arg("runuser -u ollama -- env OLLAMA_MODELS=/var/lib/ollama HOME=/var/lib/ollama /usr/bin/ollama serve")
+        .arg("/bin/runuser -u ollama -- env OLLAMA_MODELS=/var/lib/ollama HOME=/var/lib/ollama /usr/bin/ollama serve")
         .stdout(std::process::Stdio::piped())
         .spawn()
         .expect("Failed to start ollama server");
+    std::thread::sleep(Duration::from_secs(5));
     let models = conf.models.clone().unwrap_or_default();
     for model in models {
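
In short, this change points the chrooted call at the absolute path /bin/runuser and adds a fixed 5-second delay so the freshly spawned `ollama serve` process has time to come up before the model loop runs. Below is a minimal, standalone sketch of that spawn-then-wait pattern, not the repository's actual setup_ollama(): the `sleep 30` child stands in for the real `arch-chroot ... ollama serve` invocation, and the follow-up work is only indicated by a comment.

use std::process::{Command, Stdio};
use std::thread;
use std::time::Duration;

fn main() {
    // Stand-in for `arch-chroot /mnt /bin/runuser -u ollama -- ... ollama serve`:
    // spawn a long-running child in the background without waiting on it.
    let mut server = Command::new("sleep")
        .arg("30")
        .stdout(Stdio::piped())
        .spawn()
        .expect("Failed to start server process");

    // The line this commit adds: give the server a fixed grace period before
    // any dependent commands (e.g. model pulls) are issued against it.
    thread::sleep(Duration::from_secs(5));

    // ... run the dependent commands here ...

    // Clean up the background child when done.
    server.kill().ok();
    server.wait().ok();
}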