pacco/src/main.rs
JMARyA 316b2ac288
All checks were successful
ci/woodpecker/push/build Pipeline was successful
🚑️ fix ci
2025-04-16 09:13:57 +02:00

422 lines
14 KiB
Rust

use std::net::Ipv4Addr;
use std::path::{Path, PathBuf};
use based::asset::AssetRoutes;
use based::auth::User;
use based::get_pg;
use based::ui::components::prelude::Shell;
use based::ui::prelude::*;
use cli::PaccoCLI;
use cmd_lib::{run_cmd, run_fun};
use config::Config;
use pacco::pkg::arch::Architecture;
use pacco::pkg::package::run_command;
use pacco::pkg::{Package, Repository};
use rocket::data::ToByteUnit;
use rocket::{routes, tokio};
pub mod cli;
pub mod config;
pub mod routes;
/// Boot the Pacco web service.
///
/// Initializes logging, connects to Postgres (via `get_pg!`) and applies the
/// SQL migrations in `./migrations`, parses the TOML config file at `config`
/// (silently falling back to `Config::default()` if the file is unreadable or
/// invalid), ensures a default "admin" user exists, then launches Rocket on
/// 0.0.0.0:8000 with all routes mounted.
///
/// # Panics
/// Panics if the migrations fail or the Rocket server cannot launch.
async fn launch(config: String) {
    env_logger::init();
    let pg = get_pg!();
    sqlx::migrate!("./migrations").run(pg).await.unwrap();
    // NOTE(review): read *and* parse errors are both swallowed here — a
    // missing or malformed config file silently yields Config::default().
    // Consider at least logging a warning.
    let config: Config =
        toml::from_str(&std::fs::read_to_string(&config).unwrap_or_default()).unwrap_or_default();
    // Best-effort: the result is discarded, so an already-existing admin
    // user does not abort startup.
    let _ = User::create("admin".to_string(), "admin", based::auth::UserRole::Admin).await;
    // Base page shell shared by the UI routes (managed as Rocket state below).
    // Presumably the three arguments are header/nav/content slots of the
    // `based` Shell — confirm against the `based` crate.
    let shell = Shell::new(
        Nothing(),
        Nothing(),
        Background(MinHeight(
            ScreenValue::screen,
            Padding(Text("").color(&Gray::_200)).all(ScreenValue::_10),
        ))
        .color(Gray::_900),
    )
    .use_ui();
    rocket::build()
        .configure(rocket::Config {
            // Generous data-form/file limits (10 GiB) so full packages can
            // be pushed through the upload route.
            limits: rocket::data::Limits::new()
                .limit("bytes", 8.kibibytes())
                .limit("data-form", 10.gigabytes())
                .limit("file", 10.gigabytes())
                .limit("form", 32.kibibytes())
                .limit("json", 1.mebibytes())
                .limit("msgpack", 1.mebibytes())
                .limit("string", 8.kibibytes()),
            // Bind on all interfaces, fixed port 8000.
            address: std::net::IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)),
            port: 8000,
            ..Default::default()
        })
        .mount_assets()
        .mount(
            "/",
            routes![
                routes::index_page,
                routes::pkg_route,
                routes::push::upload_pkg,
                routes::user::login,
                routes::user::login_post,
                routes::user::account_page,
                routes::ui::pkg_ui,
                routes::ui::repo_ui,
                routes::user::new_api_key,
                routes::user::end_session,
                routes::user::change_password,
                routes::user::change_password_post
            ],
        )
        .manage(config)
        .manage(shell)
        .launch()
        .await
        .unwrap();
}
use std::process::{Command, Stdio};
/// Build the PKGBUILD in the current directory into Arch package(s).
///
/// Two modes:
/// * **Container mode** (`ci == false`): runs the build script inside a
///   Docker container based on `image`, mounting the current directory at
///   `/workdir`; the resulting `*.pkg.tar.*` files are rsynced back owned
///   by the invoking uid.
/// * **CI mode** (`ci == true`): runs the same script directly on the host
///   via `bash` (assumes an Arch-like environment with root privileges).
///
/// If the `SIGN_KEY` environment variable holds an ASCII-armored private
/// key, it is imported and `makepkg --sign` is used.
///
/// # Panics
/// Panics if the current directory cannot be determined, a filesystem
/// existence check fails, or the build process cannot be spawned. A build
/// that runs but fails only prints an error message.
pub fn build(image: &str, ci: bool) {
    // Get current directory (the package source dir containing the PKGBUILD)
    let current_dir = std::env::current_dir().expect("Failed to get current directory");
    let uid = nix::unistd::Uid::current().as_raw();
    // Final step: copy built packages back to /workdir owned by the caller.
    let move_pkg = format!("rsync -a --chown={uid}:{uid} /build/*.pkg.tar.* /workdir/");
    let sign_key = std::env::var("SIGN_KEY");

    // Script executed inside the build environment; joined with ';' below.
    let mut docker_script = vec![
        "set -e".to_string(),
        "pacman -Syu --noconfirm".to_string(),
        "pacman -S --noconfirm rsync base-devel".to_string(),
    ];

    if ci {
        // CI: sources stay in place, /build is just a symlink to them.
        let symlink_cmd = format!(
            "ln -s {} /build",
            std::env::current_dir().unwrap().display()
        );
        docker_script.push(symlink_cmd);
    } else {
        // Docker: copy the read-only mount into a writable /build tree.
        docker_script.push("rsync -a /workdir/ /build/".to_string());
    }

    docker_script.extend([
        // makepkg refuses to run as root, so build as an unprivileged user.
        "useradd -m build".to_string(),
        "echo 'ALL ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers".to_string(),
        "chown -R build /build".to_string(),
    ]);

    if ci {
        if std::fs::exists("./pacman.conf").unwrap() {
            println!("-> Using custom pacman.conf");
            // BUGFIX: previously copied "./workdir/pacman.conf", a path that
            // does not match the "./pacman.conf" existence check above.
            docker_script.push("cp -v ./pacman.conf /etc/pacman.conf".to_string());
        }
        if std::fs::exists("./makepkg.conf").unwrap() {
            println!("-> Using custom makepkg.conf");
            // BUGFIX: previously copied pacman.conf over /etc/makepkg.conf.
            docker_script.push("cp -v ./makepkg.conf /etc/makepkg.conf".to_string());
        }
    }

    if let Ok(sign_key) = sign_key {
        println!("Found signing key. Package will be signed.");
        let sign_key = sign_key.trim();
        let sign_key = sign_key.replace('\n', "\\n");
        // Import the key for both root and the build user; GPGKEY must
        // survive `su` for signing, hence `-w GPGKEY` below.
        let import_cmd = format!("echo -e '{sign_key}'|gpg --import");
        let import_cmd_user = format!("su build -c \"echo -e '{sign_key}'|gpg --import\"");
        let export_var_cmd = format!(
            "export GPGKEY=$(gpg --list-secret-keys --with-colons | awk -F: '/^fpr/ {{ print $10 }}'|head -n1)"
        );
        // NOTE(review): `su build -w GPGKEY -c sh -c '…'` passes "-c" and the
        // quoted script as *arguments* to `sh` — verify this trust step
        // actually runs as intended.
        let trust_cmd = format!(
            "su build -w GPGKEY -c sh -c 'echo -e \"5\" | gpg --batch --yes --no-tty --command-fd 0 --edit-key $GPGKEY trust'"
        );
        docker_script.extend([
            import_cmd,
            import_cmd_user,
            export_var_cmd,
            trust_cmd,
            "su build -w GPGKEY -c 'cd /build && makepkg -s -C -c --skippgpcheck --sign --noconfirm'".to_string(),
        ]);
    } else {
        docker_script.push(
            "su build -w GPGKEY -c 'cd /build && makepkg -c -C -s --noconfirm --skippgpcheck'"
                .to_string(),
        );
    }

    docker_script.push(move_pkg);

    // Build and run the host command
    let status = if ci {
        // CI mode: execute the script directly on the host.
        Command::new("bash")
            .stderr(Stdio::inherit())
            .stdout(Stdio::inherit())
            .args([
                "-c",
                &format!("mkdir /build && {}", docker_script.join(";")),
            ])
            .status()
            .expect("Failed to run build")
    } else {
        // TODO : install the mounted pacman.conf + makepkg.conf into /etc
        // inside the container (currently only mounted at /pacman.conf etc.)
        let workdir_vol = format!("{}:/workdir", current_dir.display());
        let mut args = vec![
            "run",
            "--rm", // Remove container after exit
            "-v",
            &workdir_vol, // Mount current dir to /workdir
        ];

        let mut extra_vols = Vec::new();
        if std::fs::exists("./pacman.conf").unwrap() {
            println!("-> Using custom pacman.conf");
            extra_vols.push(format!(
                "{}:/pacman.conf",
                current_dir.join("pacman.conf").display()
            ));
        }
        if std::fs::exists("./makepkg.conf").unwrap() {
            println!("-> Using custom makepkg.conf");
            extra_vols.push(format!(
                "{}:/makepkg.conf",
                current_dir.join("makepkg.conf").display()
            ));
        }
        for vol in &extra_vols {
            // BUGFIX: each extra volume needs its own "-v" flag; previously
            // the bare "src:dst" string was appended and docker would have
            // parsed it as the image name.
            args.extend(["-v", vol.as_str()]);
        }

        let cmd = format!(
            // We mount /workdir and work inside the container
            "mkdir /build && {}",
            docker_script.join(";")
        );
        args.extend([
            "-w", "/workdir", // Set working directory
            image, // Docker Base Image
            "bash", "-c", &cmd,
        ]);

        Command::new("docker")
            .stderr(Stdio::inherit())
            .stdout(Stdio::inherit())
            .args(args)
            .status()
            .expect("Failed to start Docker")
    };

    if !status.success() {
        eprintln!("Build failed with status: {:?}", status);
    }
}
/// List built package files (`*.pkg.tar.*`) in the current directory.
///
/// Shells out to `ls -1` so the glob is expanded by the shell; signature
/// files (`*.sig`) and blank lines are filtered out.
///
/// # Panics
/// Panics if `sh` cannot be spawned. A failing `ls` (no matches) simply
/// yields an empty vector, since only stdout is parsed.
pub fn find_package_files() -> Vec<String> {
    let output = Command::new("sh")
        .arg("-c")
        .arg("ls -1 *.pkg.tar.*")
        .output()
        .expect("failed to execute process");
    // One filename per line; keep non-empty entries that are not signatures.
    String::from_utf8_lossy(&output.stdout)
        .lines()
        .map(str::trim)
        .filter(|name| !name.is_empty() && !name.ends_with(".sig"))
        .map(str::to_string)
        .collect()
}
/// Upload a built package (and optionally its signature) to a Pacco server.
///
/// Reads the target host from `PACCO_HOST` and the API token from
/// `PACCO_TOKEN`, then POSTs the package file via `curl` to
/// `{host}/pkg/{repo}/upload`. When `signed` is true, `{package}.sig`
/// is uploaded alongside the package.
///
/// # Panics
/// Panics with a descriptive message if `PACCO_HOST` or `PACCO_TOKEN` is
/// unset, or if the `curl` invocation fails.
pub fn pacco_push(package: &str, repo: &str, signed: bool) {
    // expect() instead of unwrap(): missing env vars are a user error and
    // deserve an actionable message.
    let pacco_domain =
        std::env::var("PACCO_HOST").expect("PACCO_HOST must be set to the pacco server URL");
    let token = std::env::var("PACCO_TOKEN").expect("PACCO_TOKEN must be set to an API token");

    // curl argument fragments; cmd_lib interpolates the `$var` tokens below.
    let pkgstr = format!("pkg=@{package}");
    let sigstr = format!("sig=@{package}.sig");
    let domainstr = format!("{pacco_domain}/pkg/{repo}/upload");
    let tokenstr = format!("Token: {token}");

    println!("Pushing package {package} to {repo} @ {pacco_domain}");
    if signed {
        run_cmd!(curl -X POST -H $tokenstr -F $pkgstr -F $sigstr $domainstr).unwrap();
    } else {
        run_cmd!(curl -X POST -H $tokenstr -F $pkgstr $domainstr).unwrap();
    }
}
#[tokio::main]
async fn main() {
    // Parse CLI args via argh and dispatch on the subcommand.
    let cli: PaccoCLI = argh::from_env();
    match cli.cmd {
        // `serve` — run the web server with the given (or default) config file.
        cli::PaccoCLICommands::Serve(serve_command) => {
            launch(serve_command.config.unwrap_or("config.toml".to_string())).await;
        }
        // `build` — build the package, then optionally push and/or move output.
        cli::PaccoCLICommands::Build(build_command) => {
            // todo : check for presence of pkgbuild
            build(&build_command.image, build_command.ci);
            let pkgs = find_package_files();
            for pkg in &pkgs {
                println!("-> Successfully built {pkg}");
            }
            // Push every built package to the given repo; signed iff a
            // SIGN_KEY was available during the build.
            if let Some(push) = build_command.push {
                for pkg in &pkgs {
                    pacco_push(pkg, &push, std::env::var("SIGN_KEY").is_ok());
                }
            }
            // Move artifacts (and their .sig files, if signed) to the out dir.
            if let Some(out) = build_command.out {
                for pkg in &pkgs {
                    run_cmd!(mv -v $pkg $out).unwrap();
                    if std::env::var("SIGN_KEY").is_ok() {
                        let sig = format!("{pkg}.sig");
                        run_cmd!(mv -v $sig $out).unwrap();
                    }
                }
            }
        }
        cli::PaccoCLICommands::Package(package_command) => match package_command.cmd {
            // `package init` — start a new PKGBUILD from the pacman prototype.
            // (the command struct is currently unused)
            cli::PackageCommands::Init(package_init_command) => {
                std::fs::copy("/usr/share/pacman/PKGBUILD.proto", "PKGBUILD").unwrap();
            }
            cli::PackageCommands::Info(package_info_command) => {
                // NOTE(review): `makepkg - -printsrcinfo - -sync` looks garbled —
                // presumably `makepkg --printsrcinfo` was intended; confirm how
                // cmd_lib tokenizes long flags here.
                run_cmd!(makepkg - -printsrcinfo - -sync).unwrap();
            }
            // `package push` — upload an already-built package file.
            cli::PackageCommands::Push(package_push_command) => {
                pacco_push(
                    &package_push_command.package,
                    &package_push_command.repo,
                    package_push_command.signed,
                );
            }
        },
        cli::PaccoCLICommands::Repo(repo_command) => {
            match repo_command.cmd {
                // `repo add` — register a package file in the repo database(s).
                cli::RepoCommands::Add(repo_add) => {
                    let pkg = repo_add.package;
                    let pkg_filename = Path::new(&pkg).file_name().unwrap().to_str().unwrap();
                    let (_, _, _, arch, _) = Package::extract_pkg_name(pkg_filename).unwrap();
                    // Walk upward from the given dir (default ".") to find the repo base.
                    let repo = Repository::new_at(
                        &infer_repo(&repo_add.repo.unwrap_or(".".to_string()))
                            .expect("No repository found"),
                    );
                    println!("Using repository {:?}", repo.directory);
                    // "any" packages go into every architecture's database.
                    if matches!(arch, Architecture::any) {
                        let archs = repo.arch();
                        for a in archs {
                            let db = repo.db_path(a);
                            run_cmd!(repo-add $db $pkg).unwrap();
                        }
                    } else {
                        let db = repo.db_path(arch);
                        run_cmd!(repo-add $db $pkg).unwrap();
                    }
                }
                // `repo rm` — remove a package (by name) from every arch database.
                cli::RepoCommands::Rm(repo_rm) => {
                    let pkg = repo_rm.package;
                    let pkg_filename = Path::new(&pkg).file_name().unwrap().to_str().unwrap();
                    let (pkg_name, _, _, arch, _) =
                        Package::extract_pkg_name(pkg_filename).unwrap();
                    let repo = Repository::new_at(
                        &infer_repo(&repo_rm.repo.unwrap_or(".".to_string()))
                            .expect("No repository found"),
                    );
                    println!("Using repository {:?}", repo.directory);
                    // NOTE(review): `arch` is extracted but unused — removal always
                    // targets all architectures, unlike `repo add` above.
                    let archs = repo.arch();
                    for a in archs {
                        let db = repo.db_path(a);
                        run_cmd!(repo-remove $db $pkg_name).unwrap();
                    }
                }
                // `repo init` — create a new repository skeleton for the given
                // architectures (defaults to the host architecture).
                cli::RepoCommands::Init(repo_init) => {
                    let archs: Vec<_> = repo_init
                        .arch
                        .into_iter()
                        .map(|x| Architecture::parse(&x).expect("no valid arch"))
                        .collect();
                    let archs = if archs.is_empty() {
                        println!(
                            "No architectures specified. Using {}",
                            Architecture::own().to_string()
                        );
                        vec![Architecture::own()]
                    } else {
                        archs
                    };
                    Repository::create_at(
                        std::env::current_dir().unwrap().join(&repo_init.name),
                        archs,
                    );
                }
                // `repo index` — intended to rebuild the database from the
                // packages on disk; body is not yet implemented.
                cli::RepoCommands::Index(repo_index) => {
                    // reindex all packages
                    let repo = Repository::new_at(
                        &infer_repo(&repo_index.repo.unwrap_or(".".to_string()))
                            .expect("No repository found"),
                    );
                    for arch in repo.arch() {
                        // todo : implement
                    }
                }
            }
        }
    }
}
/// Checks if the given directory is a repo base by looking for known arch subdirectories.
fn is_repo_base(path: &Path) -> bool {
    ["x86_64", "aarch64", "any"]
        .into_iter()
        .any(|arch| path.join(arch).is_dir())
}

/// Infers the base path of a repository from any path within it.
/// Searches upward for up to 4 levels, looking for a folder containing known arch dirs.
pub fn infer_repo(path: &str) -> Option<String> {
    // Resolve symlinks when possible; fall back to the raw path otherwise.
    let mut dir = Path::new(path)
        .canonicalize()
        .unwrap_or_else(|_| PathBuf::from(path));
    // Inspect the starting directory plus up to three ancestors.
    for _ in 0..4 {
        if !dir.exists() {
            break;
        }
        if is_repo_base(&dir) {
            return Some(dir.to_string_lossy().to_string());
        }
        match dir.parent() {
            Some(parent) => dir = parent.to_path_buf(),
            None => break,
        }
    }
    None
}