pacco cli
Some checks failed
ci/woodpecker/push/build Pipeline failed

This commit is contained in:
JMARyA 2025-04-16 08:19:05 +02:00
parent b33439656a
commit ad2ae9b95e
Signed by: jmarya
GPG key ID: 901B2ADDF27C2263
8 changed files with 696 additions and 15 deletions

157
src/cli.rs Normal file
View file

@ -0,0 +1,157 @@
// TODO : pacco cli
use argh::FromArgs;
#[derive(FromArgs, PartialEq, Debug)]
/// Pacco Package Server
// Top-level CLI type: parse with `argh::from_env::<PaccoCLI>()` and
// dispatch on `cmd`.
pub struct PaccoCLI {
    #[argh(subcommand)]
    // Selected top-level subcommand (serve / build / pkg / repo).
    pub cmd: PaccoCLICommands,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "pkg")]
/// Package commands
// Wrapper for the `pacco pkg <init|info|push>` subcommand group.
pub struct PackageCommand {
    #[argh(subcommand)]
    // The concrete `pkg` action to run.
    pub cmd: PackageCommands,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "repo")]
/// Repository commands
// Wrapper for the `pacco repo <add|rm|init|index>` subcommand group.
pub struct RepoCommand {
    #[argh(subcommand)]
    // The concrete `repo` action to run.
    pub cmd: RepoCommands,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand)]
// All top-level subcommands; argh picks the variant whose
// `#[argh(subcommand, name = …)]` matches the first CLI argument.
pub enum PaccoCLICommands {
    // `pacco serve` — run the package server
    Serve(ServeCommand),
    // `pacco build` — build a package
    Build(BuildCommand),
    // `pacco pkg …` — package commands
    Package(PackageCommand),
    // `pacco repo …` — repository commands
    Repo(RepoCommand),
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "serve")]
/// Start a package server
// Consistency: every other subcommand in this file documents itself with a
// `///` doc comment (which argh uses as the description); the inline
// `description = …` attribute form produced identical help text but broke
// the file's convention.
pub struct ServeCommand {
    #[argh(option)]
    /// config file
    pub config: Option<String>,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "build")]
/// Build a package
// Consistency: use a `///` doc comment for the description like the other
// subcommands in this file; argh renders it identically to the
// `description = …` attribute.
pub struct BuildCommand {
    #[argh(option)]
    /// push to a repo after building
    pub push: Option<String>,
    #[argh(switch)]
    /// build is run inside a CI environment
    pub ci: bool,
    #[argh(option)]
    /// output directory
    pub out: Option<String>,
    #[argh(option, default = r#"String::from("archlinux")"#)]
    /// docker build image
    pub image: String,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand)]
// Subcommands of `pacco pkg`.
pub enum PackageCommands {
    Init(PackageInitCommand),
    Info(PackageInfoCommand),
    Push(PackagePushCommand),
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "init")]
/// Init a template PKGBUILD
// Takes no arguments.
pub struct PackageInitCommand {}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "info")]
/// Print package info from PKGBUILD
// Takes no arguments; operates on the PKGBUILD in the current directory.
pub struct PackageInfoCommand {}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "push")]
/// Push a package file to a pacco server
// Usage: `pacco pkg push [--signed] <package> <repo>`
pub struct PackagePushCommand {
    #[argh(switch)]
    /// push signature
    // When set, the `<package>.sig` file is uploaded alongside the package.
    pub signed: bool,
    #[argh(positional)]
    /// package file
    pub package: String,
    #[argh(positional)]
    /// repository
    pub repo: String,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand)]
// Subcommands of `pacco repo`.
pub enum RepoCommands {
    Add(RepoAdd),
    Rm(RepoRm),
    Init(RepoInit),
    Index(RepoIndex),
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "add")]
/// Add a package to repo
pub struct RepoAdd {
    #[argh(positional)]
    /// package to add
    pub package: String,
    #[argh(positional)]
    /// repository
    // Optional; the handler falls back to the current directory when omitted.
    pub repo: Option<String>,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "rm")]
/// Remove a package from repo
pub struct RepoRm {
    #[argh(positional)]
    /// package to remove
    pub package: String,
    #[argh(positional)]
    /// repository
    // Optional; the handler falls back to the current directory when omitted.
    pub repo: Option<String>,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "init")]
/// Init a new repo
pub struct RepoInit {
    #[argh(option, short = 'a')]
    /// arch
    // Repeatable (`-a x86_64 -a aarch64`); empty means "use the host arch".
    pub arch: Vec<String>,
    #[argh(positional)]
    /// repo name
    pub name: String,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "index")]
/// Reindex all packages in repo
pub struct RepoIndex {
    #[argh(positional)]
    /// repository
    // Optional; the handler falls back to the current directory when omitted.
    pub repo: Option<String>,
}
// TODO : new cli cmd
// conf through env vars -> build container
// TODO : move serving fn to pacco serve <config>

View file

@ -1,27 +1,32 @@
use std::net::Ipv4Addr;
use std::path::{Path, PathBuf};
use based::asset::AssetRoutes;
use based::auth::User;
use based::get_pg;
use based::ui::components::prelude::Shell;
use based::ui::prelude::*;
use cli::PaccoCLI;
use cmd_lib::{run_cmd, run_fun};
use config::Config;
use pacco::pkg::arch::Architecture;
use pacco::pkg::package::run_command;
use pacco::pkg::{Package, Repository};
use rocket::data::ToByteUnit;
use rocket::routes;
use rocket::{routes, tokio};
pub mod cli;
pub mod config;
pub mod routes;
#[rocket::launch]
async fn launch() -> _ {
async fn launch(config: String) {
env_logger::init();
let pg = get_pg!();
sqlx::migrate!("./migrations").run(pg).await.unwrap();
let config: Config =
toml::from_str(&std::fs::read_to_string("config.toml").unwrap_or_default())
.unwrap_or_default();
toml::from_str(&std::fs::read_to_string(&config).unwrap_or_default()).unwrap_or_default();
let _ = User::create("admin".to_string(), "admin", based::auth::UserRole::Admin).await;
@ -67,4 +72,336 @@ async fn launch() -> _ {
])
.manage(config)
.manage(shell)
.launch()
.await
.unwrap();
}
use std::process::{Command, Stdio};
/// Build the package in the current directory inside a Docker container,
/// or directly on the host when `ci` is true.
///
/// * `image` — Docker base image for non-CI builds.
/// * `ci` — when true, the build script runs via plain `bash` instead of
///   `docker run` (assumes the CI job already provides an Arch-like env).
///
/// If the `SIGN_KEY` env var holds an ASCII-armored GPG private key, it is
/// imported and `makepkg --sign` is used. A failing build is reported on
/// stderr; the error is not propagated to the caller.
pub fn build(image: &str, ci: bool) {
    // The build runs in /build; artifacts are rsynced back to /workdir,
    // chowned to the invoking user's uid so they are usable on the host.
    let current_dir = std::env::current_dir().expect("Failed to get current directory");
    let uid = nix::unistd::Uid::current().as_raw();
    let move_pkg = format!("rsync -a --chown={uid}:{uid} /build/*.pkg.tar.* /workdir/");
    let sign_key = std::env::var("SIGN_KEY");

    // Common build script: update, install tooling, copy sources to /build
    // and create an unprivileged user (makepkg refuses to run as root).
    let mut docker_script = vec![
        "set -e".to_string(),
        "pacman -Syu --noconfirm".to_string(),
        "pacman -S --noconfirm rsync base-devel".to_string(),
        "rsync -a /workdir/ /build/".to_string(),
        "useradd -m build".to_string(),
        "echo 'ALL ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers".to_string(),
        "chown -R build /build".to_string(),
    ];

    if ci {
        // NOTE(review): the existence checks look at ./pacman.conf and
        // ./makepkg.conf while the copy commands read from ./workdir/ —
        // confirm the CI layout actually provides the files there.
        if std::fs::exists("./pacman.conf").unwrap() {
            println!("-> Using custom pacman.conf");
            docker_script.push("cp -v ./workdir/pacman.conf /etc/pacman.conf".to_string());
        }
        if std::fs::exists("./makepkg.conf").unwrap() {
            println!("-> Using custom makepkg.conf");
            // Bug fix: this previously copied pacman.conf over
            // /etc/makepkg.conf (copy-paste error).
            docker_script.push("cp -v ./workdir/makepkg.conf /etc/makepkg.conf".to_string());
        }
    }

    if let Ok(sign_key) = sign_key {
        println!("Found signing key. Package will be signed.");
        // Escape real newlines so the key survives `echo -e '…'` in the script.
        let sign_key = sign_key.trim().replace('\n', "\\n");
        let import_cmd = format!("echo -e '{sign_key}'|gpg --import");
        let import_cmd_user = format!("su build -c \"echo -e '{sign_key}'|gpg --import\"");
        // Export the first imported secret key's fingerprint as GPGKEY so
        // makepkg picks it up for --sign.
        let export_var_cmd = format!(
            "export GPGKEY=$(gpg --list-secret-keys --with-colons | awk -F: '/^fpr/ {{ print $10 }}'|head -n1)"
        );
        // Mark the key as ultimately trusted for the build user.
        let trust_cmd = format!(
            "su build -w GPGKEY -c sh -c 'echo -e \"5\" | gpg --batch --yes --no-tty --command-fd 0 --edit-key $GPGKEY trust'"
        );
        docker_script.extend([
            import_cmd,
            import_cmd_user,
            export_var_cmd,
            trust_cmd,
            "su build -w GPGKEY -c 'cd /build && makepkg -s -C -c --skippgpcheck --sign --noconfirm'".to_string(),
        ]);
    } else {
        docker_script.push(
            "su build -w GPGKEY -c 'cd /build && makepkg -c -C -s --noconfirm --skippgpcheck'"
                .to_string(),
        );
    }
    docker_script.push(move_pkg);

    let status = if ci {
        // CI: run the assembled script directly on the host.
        Command::new("bash")
            .stderr(Stdio::inherit())
            .stdout(Stdio::inherit())
            .args([
                "-c",
                &format!("mkdir /build && {}", docker_script.join(";")),
            ])
            .status()
            .expect("Failed to run build")
    } else {
        // TODO : mount custom pacman.conf + makepkg.conf
        let workdir_vol = format!("{}:/workdir", current_dir.display());
        let mut args = vec![
            "run",
            "--rm", // Remove container after exit
            "-v",
            &workdir_vol, // Mount current dir to /workdir
        ];
        let mut extra_vols = Vec::new();
        if std::fs::exists("./pacman.conf").unwrap() {
            println!("-> Using custom pacman.conf");
            extra_vols.push(format!(
                "{}:/pacman.conf",
                current_dir.join("pacman.conf").display()
            ));
        }
        if std::fs::exists("./makepkg.conf").unwrap() {
            println!("-> Using custom makepkg.conf");
            extra_vols.push(format!(
                "{}:/makepkg.conf",
                current_dir.join("makepkg.conf").display()
            ));
        }
        for vol in &extra_vols {
            // Bug fix: each extra volume needs its own `-v` flag; previously
            // the path was appended without one, yielding an invalid
            // `docker run` command whenever a custom conf file was present.
            args.extend(["-v", vol.as_str()]);
        }
        let cmd = format!(
            // We mount /workdir and work inside the container
            "mkdir /build && {}",
            docker_script.join(";")
        );
        args.extend([
            "-w", "/workdir", // Set working directory
            image, // Docker Base Image
            "bash", "-c", &cmd,
        ]);
        Command::new("docker")
            .stderr(Stdio::inherit())
            .stdout(Stdio::inherit())
            .args(args)
            .status()
            .expect("Failed to start Docker")
    };

    if !status.success() {
        eprintln!("Build failed with status: {:?}", status);
    }
}
/// List built package files (`*.pkg.tar.*`) in the current directory,
/// excluding detached signature files (`*.sig`).
pub fn find_package_files() -> Vec<String> {
    let output = Command::new("sh")
        .arg("-c")
        .arg("ls -1 *.pkg.tar.*")
        .output()
        .expect("failed to execute process");
    String::from_utf8_lossy(&output.stdout)
        .lines()
        .map(str::trim)
        .filter(|name| !name.is_empty() && !name.ends_with(".sig"))
        .map(str::to_string)
        .collect()
}
/// Upload a package file (and optionally its detached `.sig`) to a pacco
/// server via `curl`.
///
/// * `package` — path of the `*.pkg.tar.*` file to upload.
/// * `repo` — target repository name on the server.
/// * `signed` — also upload `<package>.sig`.
///
/// Reads the server URL from `PACCO_HOST` and the auth token from
/// `PACCO_TOKEN`; panics with a descriptive message when either is unset
/// (previously a bare `unwrap()` gave no hint which variable was missing).
pub fn pacco_push(package: &str, repo: &str, signed: bool) {
    let pacco_domain =
        std::env::var("PACCO_HOST").expect("PACCO_HOST environment variable is not set");
    let token =
        std::env::var("PACCO_TOKEN").expect("PACCO_TOKEN environment variable is not set");
    // curl multipart fields and target URL
    let pkgstr = format!("pkg=@{package}");
    let sigstr = format!("sig=@{package}.sig");
    let domainstr = format!("{pacco_domain}/pkg/{repo}/upload");
    let tokenstr = format!("Token: {token}");
    println!("Pushing package {package} to {repo} @ {pacco_domain}");
    if signed {
        run_cmd!(curl -X POST -H $tokenstr -F $pkgstr -F $sigstr $domainstr).unwrap();
    } else {
        run_cmd!(curl -X POST -H $tokenstr -F $pkgstr $domainstr).unwrap();
    }
}
/// CLI entry point: parse arguments with argh and dispatch to the server,
/// build, package or repo subcommand handlers.
#[tokio::main]
async fn main() {
    let cli: PaccoCLI = argh::from_env();
    match cli.cmd {
        // `pacco serve [--config <file>]` — run the package server,
        // defaulting to ./config.toml.
        cli::PaccoCLICommands::Serve(serve_command) => {
            launch(serve_command.config.unwrap_or("config.toml".to_string())).await;
        }
        // `pacco build` — build the package in the current directory.
        cli::PaccoCLICommands::Build(build_command) => {
            // todo : check for presence of pkgbuild
            build(&build_command.image, build_command.ci);
            let pkgs = find_package_files();
            for pkg in &pkgs {
                println!("-> Successfully built {pkg}");
            }
            // Optionally push each built package to a pacco repository;
            // packages are marked signed when SIGN_KEY is set.
            if let Some(push) = build_command.push {
                for pkg in &pkgs {
                    pacco_push(pkg, &push, std::env::var("SIGN_KEY").is_ok());
                }
            }
            // Optionally move artifacts (and their .sig files, when a
            // signing key was configured) into the --out directory.
            if let Some(out) = build_command.out {
                for pkg in &pkgs {
                    run_cmd!(mv -v $pkg $out).unwrap();
                    if std::env::var("SIGN_KEY").is_ok() {
                        let sig = format!("{pkg}.sig");
                        run_cmd!(mv -v $sig $out).unwrap();
                    }
                }
            }
        }
        cli::PaccoCLICommands::Package(package_command) => match package_command.cmd {
            // `pacco pkg init` — copy the system PKGBUILD prototype here.
            cli::PackageCommands::Init(package_init_command) => {
                std::fs::copy("/usr/share/pacman/PKGBUILD.proto", "PKGBUILD").unwrap();
            }
            // `pacco pkg info` — print source info from the local PKGBUILD.
            cli::PackageCommands::Info(package_info_command) => {
                // NOTE(review): `- -printsrcinfo - -sync` passes `-` and
                // `-printsrcinfo` as separate arguments; presumably this was
                // meant to be `--printsrcinfo` — confirm against makepkg's
                // option syntax and cmd_lib's tokenization.
                run_cmd!(makepkg - -printsrcinfo - -sync).unwrap();
            }
            // `pacco pkg push` — upload one package file to a pacco server.
            cli::PackageCommands::Push(package_push_command) => {
                pacco_push(
                    &package_push_command.package,
                    &package_push_command.repo,
                    package_push_command.signed,
                );
            }
        },
        cli::PaccoCLICommands::Repo(repo_command) => {
            match repo_command.cmd {
                // `pacco repo add <pkg> [repo]` — register a package file in
                // the repo database(s) via `repo-add`. The repository root is
                // inferred from the given path (default: cwd).
                cli::RepoCommands::Add(repo_add) => {
                    let pkg = repo_add.package;
                    let pkg_filename = Path::new(&pkg).file_name().unwrap().to_str().unwrap();
                    let (_, _, _, arch, _) = Package::extract_pkg_name(pkg_filename).unwrap();
                    let repo = Repository::new_at(
                        &infer_repo(&repo_add.repo.unwrap_or(".".to_string()))
                            .expect("No repository found"),
                    );
                    println!("Using repository {:?}", repo.directory);
                    // `any` packages are added to every architecture's db.
                    if matches!(arch, Architecture::any) {
                        let archs = repo.arch();
                        for a in archs {
                            let db = repo.db_path(a);
                            run_cmd!(repo-add $db $pkg).unwrap();
                        }
                    } else {
                        let db = repo.db_path(arch);
                        run_cmd!(repo-add $db $pkg).unwrap();
                    }
                }
                // `pacco repo rm <pkg> [repo]` — remove the package (by name)
                // from every architecture database via `repo-remove`.
                cli::RepoCommands::Rm(repo_rm) => {
                    let pkg = repo_rm.package;
                    let pkg_filename = Path::new(&pkg).file_name().unwrap().to_str().unwrap();
                    let (pkg_name, _, _, arch, _) =
                        Package::extract_pkg_name(pkg_filename).unwrap();
                    let repo = Repository::new_at(
                        &infer_repo(&repo_rm.repo.unwrap_or(".".to_string()))
                            .expect("No repository found"),
                    );
                    println!("Using repository {:?}", repo.directory);
                    let archs = repo.arch();
                    for a in archs {
                        let db = repo.db_path(a);
                        run_cmd!(repo-remove $db $pkg_name).unwrap();
                    }
                }
                // `pacco repo init [-a <arch>…] <name>` — create a fresh
                // repository layout under the current directory.
                cli::RepoCommands::Init(repo_init) => {
                    let archs: Vec<_> = repo_init
                        .arch
                        .into_iter()
                        .map(|x| Architecture::parse(&x).expect("no valid arch"))
                        .collect();
                    // Fall back to the host architecture when none was given.
                    let archs = if archs.is_empty() {
                        println!(
                            "No architectures specified. Using {}",
                            Architecture::own().to_string()
                        );
                        vec![Architecture::own()]
                    } else {
                        archs
                    };
                    Repository::create_at(
                        std::env::current_dir().unwrap().join(&repo_init.name),
                        archs,
                    );
                }
                // `pacco repo index [repo]` — reindex all packages.
                // Currently a stub: the repository is located but nothing is
                // done per architecture yet.
                cli::RepoCommands::Index(repo_index) => {
                    // reindex all packages
                    let repo = Repository::new_at(
                        &infer_repo(&repo_index.repo.unwrap_or(".".to_string()))
                            .expect("No repository found"),
                    );
                    for arch in repo.arch() {
                        // todo : implement
                    }
                }
            }
        }
    }
}
/// Checks whether `path` is a repository base directory, i.e. it directly
/// contains at least one of the known architecture subdirectories.
fn is_repo_base(path: &Path) -> bool {
    for arch_dir in ["x86_64", "aarch64", "any"] {
        if path.join(arch_dir).is_dir() {
            return true;
        }
    }
    false
}
/// Infers the base path of a repository from any path within it.
/// Walks upward through at most 4 directory levels (the starting path plus
/// three parents), returning the first ancestor that contains known arch
/// dirs, or `None` if no repository base is found.
pub fn infer_repo(path: &str) -> Option<String> {
    // Resolve symlinks/relative segments when possible; otherwise fall back
    // to the literal path.
    let mut current = Path::new(path)
        .canonicalize()
        .unwrap_or_else(|_| PathBuf::from(path));
    for _ in 0..4 {
        if !current.exists() {
            break;
        }
        if is_repo_base(&current) {
            return Some(current.to_string_lossy().to_string());
        }
        match current.parent() {
            Some(parent) => current = parent.to_path_buf(),
            None => break,
        }
    }
    None
}

View file

@ -16,6 +16,18 @@ impl Architecture {
}
}
/// The architecture of the binary host
///
/// Determined at compile time from `target_arch`. On any target other than
/// x86_64/aarch64 the original body was empty, producing a confusing
/// "mismatched types" error; an explicit `compile_error!` now states the
/// real problem.
pub fn own() -> Architecture {
    #[cfg(target_arch = "x86_64")]
    {
        return Architecture::x86_64;
    }
    #[cfg(target_arch = "aarch64")]
    {
        return Architecture::aarch64;
    }
    #[cfg(not(any(target_arch = "x86_64", target_arch = "aarch64")))]
    compile_error!("unsupported host architecture: add a variant to Architecture::own()");
}
pub fn to_string(&self) -> String {
match self {
Architecture::x86_64 => "x86_64".to_string(),

View file

@ -5,6 +5,7 @@ use super::{Package, arch::Architecture};
/// Package Repository
pub struct Repository {
    /// Repository name (also the base name of the `<name>.db.tar.gz` file).
    pub name: String,
    /// Filesystem root of the repository, containing per-arch subdirectories.
    pub directory: std::path::PathBuf,
}
impl Repository {
@ -22,9 +23,7 @@ impl Repository {
repos
}
/// Create a new package repository with architectures from `arch`
pub fn create(name: &str, arch: Vec<Architecture>) -> Repository {
let path = PathBuf::from("./data").join(name);
pub fn create_at(path: PathBuf, arch: Vec<Architecture>) -> Repository {
std::fs::create_dir_all(&path).unwrap();
for arch in arch {
@ -32,7 +31,13 @@ impl Repository {
std::fs::create_dir_all(np).unwrap();
}
Repository::new(name).unwrap()
Repository::new_at(path.display().to_string().as_str())
}
/// Create a new package repository with architectures from `arch`
///
/// Convenience wrapper around [`Repository::create_at`] that places the
/// repository under `./data/<name>`.
pub fn create(name: &str, arch: Vec<Architecture>) -> Repository {
    Self::create_at(PathBuf::from("./data").join(name), arch)
}
}
@ -42,18 +47,30 @@ impl Repository {
if PathBuf::from("./data").join(name).exists() {
Some(Repository {
name: name.to_string(),
directory: PathBuf::from("./data").join(name),
})
} else {
None
}
}
/// Construct a `Repository` rooted at `path`; the repository name is taken
/// from the final path component.
pub fn new_at(path: &str) -> Self {
    let location = std::path::Path::new(path);
    let name = location
        .file_name()
        .unwrap()
        .to_str()
        .unwrap()
        .to_string();
    Repository {
        name,
        directory: location.to_path_buf(),
    }
}
/// Get a list of architectures for this repository
pub fn arch(&self) -> Vec<Architecture> {
let dir_path = PathBuf::from("./data").join(&self.name);
let mut arch = vec![];
if let Ok(entries) = std::fs::read_dir(dir_path) {
if let Ok(entries) = std::fs::read_dir(&self.directory) {
for entry in entries.filter_map(Result::ok) {
let file_name = entry.file_name().into_string().unwrap_or_default();
if let Some(repo_arch) = Architecture::parse(&file_name) {
@ -67,9 +84,7 @@ impl Repository {
/// Get the base path for the repository with `arch`.
pub fn base_path(&self, arch: Architecture) -> PathBuf {
PathBuf::from("./data")
.join(&self.name)
.join(arch.to_string())
self.directory.join(arch.to_string())
}
/// Get the `.db.tar.gz` content for the repository of `arch`
@ -81,6 +96,14 @@ impl Repository {
.ok()
}
/// Filesystem path of the repository database (`<name>.db.tar.gz`) for `arch`.
pub fn db_path(&self, arch: Architecture) -> String {
    let db_file = format!("{}.db.tar.gz", self.name);
    let full = self.base_path(arch).join(db_file);
    full.to_str().unwrap().to_string()
}
/// Get the `.db.tar.gz.sig` content for the repository of `arch`
pub fn sig_content(&self, arch: Architecture) -> Option<Vec<u8>> {
std::fs::read(