🎉 init

JMARyA 2025-03-12 11:32:27 +01:00
commit e5135dc9e4
Signed by: jmarya
GPG key ID: 901B2ADDF27C2263
13 changed files with 4678 additions and 0 deletions

6
.gitignore vendored Normal file

@@ -0,0 +1,6 @@
/target
/db
/repositories
/packages
/build
/.env

4090
Cargo.lock generated Normal file

File diff suppressed because it is too large

15
Cargo.toml Normal file

@@ -0,0 +1,15 @@
[package]
name = "pacco-makepkg"
version = "0.1.0"
edition = "2024"

[dependencies]
based = { git = "https://git.hydrar.de/jmarya/based" }
comrade = { git = "https://git.hydrar.de/jmarya/comrade" }
env_logger = "0.11.7"
log = "0.4.26"
reqwest = { version = "0.12.13", features = ["blocking", "multipart"] }
serde = { version = "1.0.219", features = ["derive"] }
sqlx = { version = "0.8.3", features = ["postgres"] }
tokio = { version = "1.44.0", features = ["full"] }
uuid = "1.15.1"

24
Dockerfile Normal file

@@ -0,0 +1,24 @@
FROM rust:buster as builder
RUN rustup default nightly
COPY ./Cargo.toml /app/Cargo.toml
COPY ./Cargo.lock /app/Cargo.lock
COPY ./src /app/src
COPY ./migrations /app/migrations
WORKDIR /app
RUN cargo build --release
FROM archlinux
RUN pacman -Syu --noconfirm base-devel openssl-1.1 git curl rsync systemd arch-install-scripts
COPY ./pacman.conf /etc/pacman.conf
COPY --from=builder /app/target/release/pacco-makepkg /pacco-makepkg
WORKDIR /
CMD ["/pacco-makepkg"]

6
README.md Normal file

@@ -0,0 +1,6 @@
# pacco-makepkg
pacco-makepkg is a simple `PKGBUILD` server.
It monitors git repositories, automatically runs a `PKGBUILD` build when a new version is detected, and pushes the resulting package to [pacco](https://git.hydrar.de/jmarya/pacco).
This can, for example, be used to provide prebuilt packages from the AUR.
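Judging from the code in this commit, each monitored package lives in its own directory under `./repositories/<repo>/<pkg>/` containing the `PKGBUILD`; built packages are written to `./packages/<repo>/` and, if `PACCO_URL` and `PACCO_TOKEN` are set, uploaded to the configured pacco instance.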

26
docker-compose.yml Normal file

@@ -0,0 +1,26 @@
services:
  pacco:
    build: .
    ports:
      - "8080:8000"
    privileged: true
    volumes:
      - ./packages:/packages
      - ./repositories:/repositories
      - type: tmpfs
        target: /build
        tmpfs:
          size: 42949672960 # 40GB
    env_file: .env

  postgres:
    image: timescale/timescaledb:latest-pg16
    restart: always
    ports:
      - 5432:5432
    volumes:
      - ./db:/var/lib/postgresql/data/
    environment:
      - POSTGRES_USER=user
      - POSTGRES_PASSWORD=pass
      - POSTGRES_DB=pacco_makepkg

5
env Normal file

@@ -0,0 +1,5 @@
RUST_LOG=info
ROCKET_ADDRESS=0.0.0.0
DATABASE_URL=postgres://user:pass@postgres/pacco_makepkg
PACCO_URL=https://pacco.example.com
PACCO_TOKEN=secret

8
migrations/0001_init.sql Normal file

@@ -0,0 +1,8 @@
CREATE TABLE package (
repo TEXT NOT NULL,
pkg TEXT NOT NULL,
last_commit TEXT,
last_pkgver TEXT,
PRIMARY KEY (repo, pkg)
)

27
pacman.conf Normal file

@@ -0,0 +1,27 @@
[options]
HoldPkg = pacman glibc
Architecture = auto
Color
CheckSpace
ParallelDownloads = 6
DownloadUser = alpm
SigLevel = Required DatabaseOptional
LocalFileSigLevel = Optional
[core]
Include = /etc/pacman.d/mirrorlist
[extra]
Include = /etc/pacman.d/mirrorlist
[multilib]
Include = /etc/pacman.d/mirrorlist
[navos]
Server = https://pac.hydrar.de/pkg/$repo/$arch
SigLevel = Optional TrustAll
#[our]
#SigLevel = Never
#Server = https://pac.hydrar.de/pkg/$repo/$arch

192
src/builder.rs Normal file

@@ -0,0 +1,192 @@
use std::process::Command;
use based::get_pg;
use crate::{
git::get_commit_hash,
pacco_push,
package::{PackageIndex, find_pkg, get_pkgver},
};
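/// A build root on disk: the path to the root filesystem plus a flag marking it as temporary (removed again when dropped).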
pub struct BuildEnv(String, bool);
impl BuildEnv {
pub fn new() -> Self {
let tmp_env = format!("./build/pkg-{}", uuid::Uuid::new_v4().to_string());
Self::setup_root(&tmp_env);
Self(tmp_env, true)
}
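/// Bootstrap a minimal Arch root filesystem at `path` using pacstrap, reusing it if it already exists.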
pub fn setup_root(path: &str) {
if !std::fs::exists(std::path::Path::new(path)).unwrap() {
log::info!("Setting up root fs at {path}");
let _ = Command::new("mkdir").args(["-p", path]).status();
let _ = Command::new("pacstrap")
.args(["-c", path, "base", "base-devel"])
.status();
} else {
log::info!("Reusing existing root fs at {path}");
}
}
pub fn build_dir(&self) -> String {
std::path::Path::new(&self.0)
.join("build")
.to_str()
.unwrap()
.to_string()
}
pub fn new_from(path: &str) -> Self {
Self::setup_root(path);
Self(path.to_owned(), false)
}
pub fn path(&self) -> &str {
self.0.as_str()
}
pub fn copy_build_env_dir(&self, dir: &str) {
log::info!(
"Copying build env from {dir} -> {}",
std::path::Path::new(&self.0)
.join("build")
.to_str()
.unwrap()
);
Command::new("rsync")
.arg("-azhru")
.arg("--delete")
.arg(format!("{dir}/"))
.arg(format!(
"{}/",
std::path::Path::new(&self.0)
.join("build")
.to_str()
.unwrap()
))
.output()
.unwrap();
}
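/// Run a shell command inside the build root via systemd-nspawn, with the host
/// pacman.conf bind-mounted; returns the success flag and the captured stdout.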
pub fn run_command(&self, cmd: &str) -> (bool, String) {
let output = Command::new("systemd-nspawn")
.arg("-D")
.arg(self.path())
.arg("--bind")
.arg("./pacman.conf:/etc/pacman.conf")
.arg("bash")
.arg("-c")
.arg(cmd)
.output()
.expect("Failed to run systemd-nspawn container");
let stdout = String::from_utf8_lossy(&output.stdout).to_string();
(output.status.success(), stdout)
}
pub fn cleanup(&self) {
if self.1 {
Command::new("umount")
.arg(std::path::Path::new(&self.0).join("proc"))
.output()
.unwrap();
log::info!("Removing temporary build environment at {}", self.0);
std::fs::remove_dir_all(&self.0).unwrap();
}
}
}
impl Drop for BuildEnv {
fn drop(&mut self) {
self.cleanup();
}
}
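/// Build a package inside a fresh temporary build root: copy its sources in, run makepkg
/// as an unprivileged user, record the built commit and pkgver, store the package under
/// ./packages and push it to pacco. Returns the build log and, on success, the package file.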
pub async fn build(pkg: &PackageIndex) -> (String, Option<(String, Vec<u8>)>) {
let base_env = BuildEnv::new();
let commit = get_commit_hash(pkg.path().to_str().unwrap());
let pkgver = get_pkgver(pkg.path().to_str().unwrap());
base_env.copy_build_env_dir(&pkg.build_dir());
log::info!(
"Building {} / {} @ {}{}",
pkg.repo,
pkg.pkg,
pkgver.as_ref().unwrap_or(&String::new()),
if let Some(c) = &commit {
format!(" [#{c}]")
} else {
String::new()
}
);
let (success, stdout) = base_env.run_command(
r#"cd /build;pacman -Syu --noconfirm;useradd build;echo "ALL ALL=(ALL) NOPASSWD: ALL"|tee -a /etc/sudoers;chown -R build /build;su -c "makepkg -c -C -s --noconfirm" build"#);
if success {
sqlx::query(
"UPDATE package SET last_commit = $1, last_pkgver = $2 WHERE repo = $3 AND pkg = $4",
)
.bind(&commit)
.bind(&pkgver)
.bind(&pkg.repo)
.bind(&pkg.pkg)
.execute(get_pg!())
.await
.unwrap();
let package = find_pkg(&base_env.build_dir()).unwrap();
log::info!(
"Successfully built {} / {} @ {}{}",
pkg.repo,
pkg.pkg,
pkgver.as_ref().unwrap_or(&String::new()),
if let Some(c) = &commit {
format!(" [#{c}]")
} else {
String::new()
}
);
std::fs::create_dir_all(std::path::Path::new("./packages").join(&pkg.repo)).unwrap();
std::fs::write(
std::path::Path::new("./packages")
.join(&pkg.repo)
.join(&package.0),
&package.1,
)
.unwrap();
pacco_push(
std::path::Path::new("./packages")
.join(&pkg.repo)
.join(&package.0)
.to_str()
.unwrap(),
&pkg.repo,
)
.await;
(stdout, Some(package))
} else {
log::error!(
"Error building {} / {} @ {}{}",
pkg.repo,
pkg.pkg,
pkgver.as_ref().unwrap_or(&String::new()),
if let Some(c) = &commit {
format!(" [#{c}]")
} else {
String::new()
}
);
(stdout, None)
}
}

53
src/git.rs Normal file

@@ -0,0 +1,53 @@
use std::path::Path;
use std::process::Command;
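/// Clone the repository if it does not exist locally, otherwise pull the latest changes; returns the resulting HEAD commit hash.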
pub fn pull(repo: &str) -> Option<String> {
let repo_path = Path::new(repo);
add_git_safe_dir(repo_path.to_str().unwrap());
if repo_path.exists() {
let _ = Command::new("git")
.arg("pull")
.current_dir(repo_path)
.status()
.expect("Failed to pull latest changes");
} else {
let _ = Command::new("git")
.args(["clone", repo, repo])
.status()
.expect("Failed to clone repository");
}
get_commit_hash(repo)
}
pub fn get_commit_hash(repo: &str) -> Option<String> {
let output = Command::new("git")
.args(["rev-parse", "HEAD"])
.current_dir(repo)
.output()
.ok()?;
Some(String::from_utf8_lossy(&output.stdout).trim().to_string())
}
pub fn is_git(dir: &str) -> bool {
std::fs::exists(std::path::Path::new(dir).join(".git")).unwrap()
}
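/// Register the directory as a git safe.directory so git commands succeed even when the repository is owned by a different user.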
pub fn add_git_safe_dir(dir: &str) {
let abs_path = std::path::absolute(std::path::Path::new(dir)).unwrap();
let path = abs_path.to_str().unwrap();
log::info!("Adding {path} to git safe dirs");
let output = Command::new("git")
.args(["config", "--global", "--add", "safe.directory", path])
.output()
.expect("Failed to execute git config");
if !output.status.success() {
eprintln!(
"Error setting safe.directory: {}",
String::from_utf8_lossy(&output.stderr)
);
}
}

52
src/main.rs Normal file

@@ -0,0 +1,52 @@
use based::get_pg;
use comrade::defer;
use comrade::service::ServiceManager;
use package::{rebuild_pkgs, reindex_pkg};
use reqwest::Client;
use reqwest::multipart::{Form, Part};
use std::io::Read;
pub mod builder;
pub mod git;
pub mod package;
#[tokio::main]
async fn main() {
env_logger::init();
sqlx::migrate!().run(get_pg!()).await.unwrap();
reindex_pkg(&std::path::Path::new("./repositories")).await;
rebuild_pkgs().await;
let s = ServiceManager::new();
let s = s.spawn();
defer!(move || s.join().unwrap());
}
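/// Upload a built package file to the pacco instance configured via PACCO_URL, authenticating with PACCO_TOKEN.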
async fn pacco_push(pkg_path: &str, repo: &str) {
if let Ok(pacco) = std::env::var("PACCO_URL") {
log::info!("Pushing package to pacco at {pacco}");
let upload_url = format!("{}/pkg/{repo}/upload", pacco);
let client = Client::new();
let mut pkg_file = std::fs::File::open(pkg_path).unwrap();
let mut pkg_buffer = Vec::new();
pkg_file.read_to_end(&mut pkg_buffer).unwrap();
let pkg_part = Part::bytes(pkg_buffer).file_name(pkg_path.to_string());
let form = Form::new().part("pkg", pkg_part);
if let Ok(_) = client
.post(upload_url)
.multipart(form)
.header("Token", std::env::var("PACCO_TOKEN").unwrap())
.send()
.await
{
log::info!("Successfully pushed {} to pacco", pkg_path);
}
}
}

174
src/package.rs Normal file

@@ -0,0 +1,174 @@
use based::get_pg;
use serde::{Deserialize, Serialize};
use sqlx::FromRow;
use std::fs::read_dir;
use std::io::Read;
use std::path::Path;
use std::{fs, path::PathBuf};
use crate::builder::{BuildEnv, build};
use crate::git::{is_git, pull};
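/// A tracked package: repository and package name plus the commit and pkgver of the last successful build.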
#[derive(Deserialize, Serialize, FromRow)]
pub struct PackageIndex {
pub repo: String,
pub pkg: String,
pub last_commit: Option<String>,
pub last_pkgver: Option<String>,
}
impl PackageIndex {
pub async fn all() -> Vec<Self> {
sqlx::query_as("SELECT * FROM package")
.fetch_all(get_pg!())
.await
.unwrap()
}
pub fn path(&self) -> PathBuf {
std::path::Path::new("./repositories")
.join(&self.repo)
.join(&self.pkg)
}
pub fn build_dir(&self) -> String {
format!("./repositories/{}/{}", self.repo, self.pkg)
}
}
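/// Check every indexed package and rebuild it when its git HEAD or upstream pkgver has changed since the last build.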
pub async fn rebuild_pkgs() {
log::info!("Start checking for package rebuilds");
let all_pkgs = PackageIndex::all().await;
for pkg in &all_pkgs {
let mut should_rebuild = false;
if is_git(pkg.path().to_str().unwrap()) {
let git_commit = pull(pkg.path().to_str().unwrap()).unwrap_or_default();
if let Some(old_commit) = &pkg.last_commit {
if *old_commit != git_commit {
log::info!(
"Package {} / {} should rebuild: Commit {} -> {}",
pkg.repo,
pkg.pkg,
old_commit,
git_commit
);
should_rebuild = true;
}
} else {
log::info!(
"Package {} / {} should rebuild: No last commit",
pkg.repo,
pkg.pkg
);
should_rebuild = true;
}
}
let pkgver = get_pkgver(pkg.path().to_str().unwrap()).unwrap_or_default();
if let Some(old_pkgver) = &pkg.last_pkgver {
if *old_pkgver != pkgver {
log::info!(
"Package {} / {} should rebuild: pkgver {} -> {}",
pkg.repo,
pkg.pkg,
old_pkgver,
pkgver
);
should_rebuild = true;
}
} else {
log::info!(
"Package {} / {} should rebuild: No last pkgver",
pkg.repo,
pkg.pkg
);
should_rebuild = true;
}
if should_rebuild {
build(pkg).await;
}
}
log::info!("Done checking for package rebuilds");
}
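/// Determine the current pkgver by running `makepkg --printsrcinfo` inside a persistent build root and parsing the output.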
pub fn get_pkgver(repo: &str) -> Option<String> {
let base_env = BuildEnv::new_from("./build/srcinfo");
base_env.copy_build_env_dir(repo);
let (success, out) = base_env.run_command(
r#"cd /build;useradd build;echo "ALL ALL=(ALL) NOPASSWD: ALL"|tee -a /etc/sudoers;chown -R build /build;su -c "makepkg --printsrcinfo" build"#);
if success {
for line in out.lines() {
let trimmed = line.trim();
if let Some(pkgver) = trimmed.strip_prefix("pkgver = ") {
return Some(pkgver.to_string());
}
}
}
None
}
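/// Find the first built *.tar.zst package in the given directory and return its file name and contents.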
pub fn find_pkg(dir: &str) -> Option<(String, Vec<u8>)> {
let path = Path::new(dir);
if let Ok(entries) = fs::read_dir(path) {
for entry in entries.flatten() {
let file_path = entry.path();
if let Some(ext) = file_path.extension() {
if ext == "zst" {
if let Some(file_name) = file_path.file_name().and_then(|n| n.to_str()) {
if file_name.ends_with(".tar.zst") {
match fs::File::open(&file_path) {
Ok(mut file) => {
let mut content = Vec::new();
if file.read_to_end(&mut content).is_ok() {
return Some((file_name.to_string(), content));
}
}
Err(_) => continue,
}
}
}
}
}
}
}
None
}
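/// Walk the repositories directory and register every <repo>/<pkg> directory in the database, skipping entries that already exist.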
pub async fn reindex_pkg(repo_dir: &Path) {
log::info!("Indexing package builds");
let mut dir_entries = read_dir(repo_dir).unwrap();
while let Some(Ok(repo_entry)) = dir_entries.next() {
if repo_entry.file_type().unwrap().is_dir() {
let repo_name = repo_entry.file_name().into_string().unwrap();
let mut pkg_entries = read_dir(repo_entry.path()).unwrap();
while let Some(Ok(pkg_entry)) = pkg_entries.next() {
if pkg_entry.file_type().unwrap().is_dir() {
let pkg_name = pkg_entry.file_name().into_string().unwrap();
log::info!("Found package {repo_name} / {pkg_name}");
sqlx::query(
"INSERT INTO package (repo, pkg) VALUES ($1, $2) ON CONFLICT DO NOTHING",
)
.bind(&repo_name)
.bind(&pkg_name)
.execute(get_pg!())
.await
.unwrap();
}
}
}
}
log::info!("Done indexing package builds");
}