pacco cli
Some checks failed
ci/woodpecker/push/build Pipeline failed

JMARyA 2025-04-16 08:19:05 +02:00
parent b33439656a
commit ad2ae9b95e
Signed by: jmarya
GPG key ID: 901B2ADDF27C2263
8 changed files with 696 additions and 15 deletions

Cargo.lock generated

@@ -103,6 +103,38 @@ version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
[[package]]
name = "argh"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34ff18325c8a36b82f992e533ece1ec9f9a9db446bd1c14d4f936bac88fcd240"
dependencies = [
"argh_derive",
"argh_shared",
"rust-fuzzy-search",
]
[[package]]
name = "argh_derive"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adb7b2b83a50d329d5d8ccc620f5c7064028828538bdf5646acd60dc1f767803"
dependencies = [
"argh_shared",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "argh_shared"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a464143cc82dedcdc3928737445362466b7674b5db4e2eb8e869846d6d84f4f6"
dependencies = [
"serde",
]
[[package]]
name = "async-stream"
version = "0.3.6"
@@ -335,6 +367,12 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "cfg_aliases"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]]
name = "chrono"
version = "0.4.40"
@@ -360,6 +398,32 @@ dependencies = [
"inout",
]
[[package]]
name = "cmd_lib"
version = "1.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "371c15a3c178d0117091bd84414545309ca979555b1aad573ef591ad58818d41"
dependencies = [
"cmd_lib_macros",
"env_logger 0.10.2",
"faccess",
"lazy_static",
"log",
"os_pipe",
]
[[package]]
name = "cmd_lib_macros"
version = "1.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb844bd05be34d91eb67101329aeba9d3337094c04fd8507d821db7ebb488eaf"
dependencies = [
"proc-macro-error2",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "colorchoice"
version = "1.0.3"
@@ -734,6 +798,17 @@ dependencies = [
"pin-project-lite",
]
[[package]]
name = "faccess"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59ae66425802d6a903e268ae1a08b8c38ba143520f227a205edf4e9c7e3e26d5"
dependencies = [
"bitflags 1.3.2",
"libc",
"winapi",
]
[[package]]
name = "fastrand"
version = "2.3.0"
@@ -1725,6 +1800,18 @@ dependencies = [
"tempfile",
]
[[package]]
name = "nix"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46"
dependencies = [
"bitflags 2.9.0",
"cfg-if",
"cfg_aliases",
"libc",
]
[[package]]
name = "nu-ansi-term"
version = "0.46.0"
@@ -1867,6 +1954,16 @@ dependencies = [
"vcpkg",
]
[[package]]
name = "os_pipe"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ffd2b0a5634335b135d5728d84c5e0fd726954b87111f7506a61c502280d982"
dependencies = [
"libc",
"windows-sys 0.59.0",
]
[[package]]
name = "overload"
version = "0.1.1"
@@ -1877,13 +1974,16 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
name = "pacco"
version = "0.1.0"
dependencies = [
"argh",
"based",
"bytesize",
"chrono",
"cmd_lib",
"comrade",
"env_logger 0.11.7",
"log",
"maud",
"nix",
"rand 0.9.0",
"reqwest 0.12.15",
"rocket",
@@ -2054,6 +2154,28 @@ dependencies = [
"version_check",
]
[[package]]
name = "proc-macro-error-attr2"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5"
dependencies = [
"proc-macro2",
"quote",
]
[[package]]
name = "proc-macro-error2"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802"
dependencies = [
"proc-macro-error-attr2",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "proc-macro2"
version = "1.0.94"
@@ -2494,6 +2616,12 @@ dependencies = [
"zeroize",
]
[[package]]
name = "rust-fuzzy-search"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a157657054ffe556d8858504af8a672a054a6e0bd9e8ee531059100c0fa11bb2"
[[package]]
name = "rustc-demangle"
version = "0.1.24"

Cargo.toml

@@ -20,3 +20,6 @@ sqlx = "0.8.2"
tar = "0.4.43"
toml = "0.8.19"
zstd = "0.13.2"
argh = "0.1.13"
cmd_lib = "1.9.5"
nix = { version = "0.29.0", features = ["user"] }

Dockerfile

@@ -15,4 +15,4 @@ COPY --from=builder /app/target/release/pacco /pacco
WORKDIR /
CMD ["/pacco"]
CMD ["/pacco", "serve", "/config.toml"]

README.md

@@ -28,3 +28,24 @@ curl -X POST \
-F "sig=@./<pkg_name>-<version>-<rel>-<arch>.pkg.tar.zst.sig" \
"https://<domain>/pkg/<repo>/upload"
```
### Build with CI
You can add the following Woodpecker CI pipeline to automatically build and push a new package version on every push to `main`.
```yml
when:
  - event: push
    branch: main

steps:
  - name: "PKGBUILD"
    image: git.hydrar.de/jmarya/pacco:latest
    commands:
      - pacco build --ci --push navos
    environment:
      PACCO_HOST: "https://pac.hydrar.de"
      PACCO_TOKEN:
        from_secret: pacco_token
      SIGN_KEY:
        from_secret: navos_key
```
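The pipeline relies on three environment variables: `PACCO_HOST` (base URL of the pacco server), `PACCO_TOKEN` (the upload token, sent as the `Token` header), and optionally `SIGN_KEY` (a private GPG key; when it is set the package is signed and its `.sig` file is uploaded as well). The same workflow also works outside of CI with the new CLI; a sketch based on the commands added in `src/cli.rs`, with the repository name, domain and token as placeholders:

```sh
export PACCO_HOST="https://<domain>"
export PACCO_TOKEN="<token>"

# Build the PKGBUILD in the current directory inside a container
# (default image: archlinux, override with --image) and push the result:
pacco build --push <repo>

# Or keep the built packages locally and push one explicitly:
pacco build --out ./out
pacco pkg push ./out/<pkg_name>-<version>-<rel>-<arch>.pkg.tar.zst <repo>
```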

src/cli.rs (new file)

@@ -0,0 +1,157 @@
// TODO : pacco cli
use argh::FromArgs;
#[derive(FromArgs, PartialEq, Debug)]
/// Pacco Package Server
pub struct PaccoCLI {
#[argh(subcommand)]
pub cmd: PaccoCLICommands,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "pkg")]
/// Package commands
pub struct PackageCommand {
#[argh(subcommand)]
pub cmd: PackageCommands,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "repo")]
/// Repository commands
pub struct RepoCommand {
#[argh(subcommand)]
pub cmd: RepoCommands,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand)]
pub enum PaccoCLICommands {
Serve(ServeCommand),
Build(BuildCommand),
Package(PackageCommand),
Repo(RepoCommand),
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "serve", description = "Start a package server")]
pub struct ServeCommand {
#[argh(option)]
/// config file
pub config: Option<String>,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "build", description = "Build a package")]
pub struct BuildCommand {
#[argh(option)]
/// push to a repo after building
pub push: Option<String>,
#[argh(switch)]
/// build is run inside a CI environment
pub ci: bool,
#[argh(option)]
/// output directory
pub out: Option<String>,
#[argh(option, default = r#"String::from("archlinux")"#)]
/// docker build image
pub image: String,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand)]
pub enum PackageCommands {
Init(PackageInitCommand),
Info(PackageInfoCommand),
Push(PackagePushCommand),
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "init")]
/// Init a template PKGBUILD
pub struct PackageInitCommand {}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "info")]
/// Print package info from PKGBUILD
pub struct PackageInfoCommand {}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "push")]
/// Push a package file to a pacco server
pub struct PackagePushCommand {
#[argh(switch)]
/// push signature
pub signed: bool,
#[argh(positional)]
/// package file
pub package: String,
#[argh(positional)]
/// repository
pub repo: String,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand)]
pub enum RepoCommands {
Add(RepoAdd),
Rm(RepoRm),
Init(RepoInit),
Index(RepoIndex),
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "add")]
/// Add a package to repo
pub struct RepoAdd {
#[argh(positional)]
/// package to add
pub package: String,
#[argh(positional)]
/// repository
pub repo: Option<String>,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "rm")]
/// Remove a package from repo
pub struct RepoRm {
#[argh(positional)]
/// package to remove
pub package: String,
#[argh(positional)]
/// repository
pub repo: Option<String>,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "init")]
/// Init a new repo
pub struct RepoInit {
#[argh(option, short = 'a')]
/// arch
pub arch: Vec<String>,
#[argh(positional)]
/// repo name
pub name: String,
}
#[derive(FromArgs, PartialEq, Debug)]
#[argh(subcommand, name = "index")]
/// Reindex all packages in repo
pub struct RepoIndex {
#[argh(positional)]
/// repository
pub repo: Option<String>,
}
// TODO : new cli cmd
// conf through env vars -> build container
// TODO : move serving fn to pacco serve <config>

src/main.rs

@@ -1,27 +1,32 @@
use std::net::Ipv4Addr;
use std::path::{Path, PathBuf};
use based::asset::AssetRoutes;
use based::auth::User;
use based::get_pg;
use based::ui::components::prelude::Shell;
use based::ui::prelude::*;
use cli::PaccoCLI;
use cmd_lib::{run_cmd, run_fun};
use config::Config;
use pacco::pkg::arch::Architecture;
use pacco::pkg::package::run_command;
use pacco::pkg::{Package, Repository};
use rocket::data::ToByteUnit;
use rocket::routes;
use rocket::{routes, tokio};
pub mod cli;
pub mod config;
pub mod routes;
#[rocket::launch]
async fn launch() -> _ {
async fn launch(config: String) {
env_logger::init();
let pg = get_pg!();
sqlx::migrate!("./migrations").run(pg).await.unwrap();
let config: Config =
toml::from_str(&std::fs::read_to_string("config.toml").unwrap_or_default())
.unwrap_or_default();
toml::from_str(&std::fs::read_to_string(&config).unwrap_or_default()).unwrap_or_default();
let _ = User::create("admin".to_string(), "admin", based::auth::UserRole::Admin).await;
@@ -67,4 +72,336 @@ async fn launch() -> _ {
])
.manage(config)
.manage(shell)
.launch()
.await
.unwrap();
}
use std::process::{Command, Stdio};
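/// Build the package from the PKGBUILD in the current directory.
/// Outside of CI the build runs in a throwaway Docker container based on `image`;
/// with `ci` the same build script is executed directly on the host.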
pub fn build(image: &str, ci: bool) {
// Get current directory
let current_dir = std::env::current_dir().expect("Failed to get current directory");
let uid = nix::unistd::Uid::current().as_raw();
let move_pkg = format!("rsync -a --chown={uid}:{uid} /build/*.pkg.tar.* /workdir/");
let sign_key = std::env::var("SIGN_KEY");
// Build the Docker command
let mut docker_script = vec![
"set -e".to_string(),
"pacman -Syu --noconfirm".to_string(),
"pacman -S --noconfirm rsync base-devel".to_string(),
"rsync -a /workdir/ /build/".to_string(),
"useradd -m build".to_string(),
"echo 'ALL ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers".to_string(),
"chown -R build /build".to_string(),
];
if ci {
if std::fs::exists("./pacman.conf").unwrap() {
println!("-> Using custom pacman.conf");
docker_script.extend(["cp -v ./workdir/pacman.conf /etc/pacman.conf".to_string()]);
}
if std::fs::exists("./makepkg.conf").unwrap() {
println!("-> Using custom makepkg.conf");
docker_script.extend(["cp -v ./workdir/pacman.conf /etc/makepkg.conf".to_string()]);
}
}
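// If a signing key was provided via SIGN_KEY, import it for root and the build
// user, mark it as trusted and have makepkg sign the package; otherwise build unsigned.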
if let Ok(sign_key) = sign_key {
println!("Found signing key. Package will be signed.");
let sign_key = sign_key.trim();
let sign_key = sign_key.replace('\n', "\\n");
let import_cmd = format!("echo -e '{sign_key}'|gpg --import");
let import_cmd_user = format!("su build -c \"echo -e '{sign_key}'|gpg --import\"");
let export_var_cmd = format!(
"export GPGKEY=$(gpg --list-secret-keys --with-colons | awk -F: '/^fpr/ {{ print $10 }}'|head -n1)"
);
let trust_cmd = format!(
"su build -w GPGKEY -c sh -c 'echo -e \"5\" | gpg --batch --yes --no-tty --command-fd 0 --edit-key $GPGKEY trust'"
);
docker_script.extend([
import_cmd,
import_cmd_user,
export_var_cmd,
trust_cmd,
"su build -w GPGKEY -c 'cd /build && makepkg -s -C -c --skippgpcheck --sign --noconfirm'".to_string(),
]);
} else {
docker_script.extend([
"su build -w GPGKEY -c 'cd /build && makepkg -c -C -s --noconfirm --skippgpcheck'"
.to_string(),
]);
}
docker_script.extend([move_pkg]);
// Build the Docker run command
let status = if ci {
Command::new("bash")
.stderr(Stdio::inherit())
.stdout(Stdio::inherit())
.args([
"-c",
&format!("mkdir /build && {}", docker_script.join(";")),
])
.status()
.expect("Failed to run build")
} else {
// TODO : mount custom pacman.conf + makepkg.conf
let workdir_vol = format!("{}:/workdir", current_dir.display());
let mut args = vec![
"run",
"--rm", // Remove container after exit
"-v",
&workdir_vol, // Mount current dir to /workdir
];
let mut extra_vols = Vec::new();
if std::fs::exists("./pacman.conf").unwrap() {
println!("-> Using custom pacman.conf");
extra_vols.push(format!(
"{}:/pacman.conf",
current_dir.join("pacman.conf").display()
));
}
if std::fs::exists("./makepkg.conf").unwrap() {
println!("-> Using custom makepkg.conf");
extra_vols.push(format!(
"{}:/makepkg.conf",
current_dir.join("makepkg.conf").display()
));
}
for vol in &extra_vols {
args.extend([vol.as_str()]);
}
let cmd = format!(
// We mount /workdir and work inside the container
"mkdir /build && {}",
docker_script.join(";")
);
args.extend([
"-w", "/workdir", // Set working directory
image, // Docker Base Image
"bash", "-c", &cmd,
]);
Command::new("docker")
.stderr(Stdio::inherit())
.stdout(Stdio::inherit())
.args(args)
.status()
.expect("Failed to start Docker")
};
if !status.success() {
eprintln!("Build failed with status: {:?}", status);
}
}
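/// List the built package files (`*.pkg.tar.*`) in the current directory,
/// excluding detached `.sig` files.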
pub fn find_package_files() -> Vec<String> {
let output = Command::new("sh")
.arg("-c")
.arg("ls -1 *.pkg.tar.*")
.output()
.expect("failed to execute process");
let res = String::from_utf8_lossy(&output.stdout).to_string();
res.split("\n")
.filter_map(|x| {
let x = x.trim();
if x.is_empty() {
None
} else {
if x.ends_with(".sig") {
return None;
}
Some(x.to_string())
}
})
.collect()
}
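/// Upload a package (and optionally its `.sig`) to a pacco server via curl,
/// using `PACCO_HOST` as the server URL and `PACCO_TOKEN` for authentication.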
pub fn pacco_push(package: &str, repo: &str, signed: bool) {
let pacco_domain = std::env::var("PACCO_HOST").unwrap();
let pkg = package;
let repo = repo;
let pkgstr = format!("pkg=@{pkg}");
let sigstr = format!("sig=@{pkg}.sig");
let domainstr = format!("{pacco_domain}/pkg/{repo}/upload");
let token = std::env::var("PACCO_TOKEN").unwrap();
let tokenstr = format!("Token: {token}");
println!("Pushing package {package} to {repo} @ {pacco_domain}");
if signed {
run_cmd!(curl -X POST -H $tokenstr -F $pkgstr -F $sigstr $domainstr).unwrap();
} else {
run_cmd!(curl -X POST -H $tokenstr -F $pkgstr $domainstr).unwrap();
}
}
#[tokio::main]
async fn main() {
let cli: PaccoCLI = argh::from_env();
match cli.cmd {
cli::PaccoCLICommands::Serve(serve_command) => {
launch(serve_command.config.unwrap_or("config.toml".to_string())).await;
}
cli::PaccoCLICommands::Build(build_command) => {
// todo : check for presence of pkgbuild
build(&build_command.image, build_command.ci);
let pkgs = find_package_files();
for pkg in &pkgs {
println!("-> Successfully built {pkg}");
}
if let Some(push) = build_command.push {
for pkg in &pkgs {
pacco_push(pkg, &push, std::env::var("SIGN_KEY").is_ok());
}
}
if let Some(out) = build_command.out {
for pkg in &pkgs {
run_cmd!(mv -v $pkg $out).unwrap();
if std::env::var("SIGN_KEY").is_ok() {
let sig = format!("{pkg}.sig");
run_cmd!(mv -v $sig $out).unwrap();
}
}
}
}
cli::PaccoCLICommands::Package(package_command) => match package_command.cmd {
cli::PackageCommands::Init(package_init_command) => {
std::fs::copy("/usr/share/pacman/PKGBUILD.proto", "PKGBUILD").unwrap();
}
cli::PackageCommands::Info(package_info_command) => {
run_cmd!(makepkg --printsrcinfo --sync).unwrap();
}
cli::PackageCommands::Push(package_push_command) => {
pacco_push(
&package_push_command.package,
&package_push_command.repo,
package_push_command.signed,
);
}
},
cli::PaccoCLICommands::Repo(repo_command) => {
match repo_command.cmd {
cli::RepoCommands::Add(repo_add) => {
let pkg = repo_add.package;
let pkg_filename = Path::new(&pkg).file_name().unwrap().to_str().unwrap();
let (_, _, _, arch, _) = Package::extract_pkg_name(pkg_filename).unwrap();
let repo = Repository::new_at(
&infer_repo(&repo_add.repo.unwrap_or(".".to_string()))
.expect("No repository found"),
);
println!("Using repository {:?}", repo.directory);
if matches!(arch, Architecture::any) {
let archs = repo.arch();
for a in archs {
let db = repo.db_path(a);
run_cmd!(repo-add $db $pkg).unwrap();
}
} else {
let db = repo.db_path(arch);
run_cmd!(repo-add $db $pkg).unwrap();
}
}
cli::RepoCommands::Rm(repo_rm) => {
let pkg = repo_rm.package;
let pkg_filename = Path::new(&pkg).file_name().unwrap().to_str().unwrap();
let (pkg_name, _, _, arch, _) =
Package::extract_pkg_name(pkg_filename).unwrap();
let repo = Repository::new_at(
&infer_repo(&repo_rm.repo.unwrap_or(".".to_string()))
.expect("No repository found"),
);
println!("Using repository {:?}", repo.directory);
let archs = repo.arch();
for a in archs {
let db = repo.db_path(a);
run_cmd!(repo-remove $db $pkg_name).unwrap();
}
}
cli::RepoCommands::Init(repo_init) => {
let archs: Vec<_> = repo_init
.arch
.into_iter()
.map(|x| Architecture::parse(&x).expect("no valid arch"))
.collect();
let archs = if archs.is_empty() {
println!(
"No architectures specified. Using {}",
Architecture::own().to_string()
);
vec![Architecture::own()]
} else {
archs
};
Repository::create_at(
std::env::current_dir().unwrap().join(&repo_init.name),
archs,
);
}
cli::RepoCommands::Index(repo_index) => {
// reindex all packages
let repo = Repository::new_at(
&infer_repo(&repo_index.repo.unwrap_or(".".to_string()))
.expect("No repository found"),
);
for arch in repo.arch() {
// todo : implement
}
}
}
}
}
}
/// Checks if the given directory is a repo base by looking for known arch subdirectories.
fn is_repo_base(path: &Path) -> bool {
let arch_dirs = ["x86_64", "aarch64", "any"];
arch_dirs.iter().any(|dir| path.join(dir).is_dir())
}
/// Infers the base path of a repository from any path within it.
/// Searches upward for up to 4 levels, looking for a folder containing known arch dirs.
pub fn infer_repo(path: &str) -> Option<String> {
let mut current = Path::new(path)
.canonicalize()
.unwrap_or_else(|_| PathBuf::from(path));
let mut steps = 0;
while current.exists() && steps < 4 {
if is_repo_base(&current) {
return Some(current.to_string_lossy().to_string());
}
if let Some(parent) = current.parent() {
current = parent.to_path_buf();
steps += 1;
} else {
break;
}
}
None
}

src/pkg/arch.rs

@@ -16,6 +16,18 @@ impl Architecture {
}
}
/// The architecture of the binary host
pub fn own() -> Architecture {
#[cfg(target_arch = "x86_64")]
{
return Architecture::x86_64;
}
#[cfg(target_arch = "aarch64")]
{
return Architecture::aarch64;
}
}
pub fn to_string(&self) -> String {
match self {
Architecture::x86_64 => "x86_64".to_string(),

src/pkg/repo.rs

@@ -5,6 +5,7 @@ use super::{Package, arch::Architecture};
/// Package Repository
pub struct Repository {
pub name: String,
pub directory: std::path::PathBuf,
}
impl Repository {
@@ -22,9 +23,7 @@ impl Repository {
repos
}
/// Create a new package repository with architectures from `arch`
pub fn create(name: &str, arch: Vec<Architecture>) -> Repository {
let path = PathBuf::from("./data").join(name);
pub fn create_at(path: PathBuf, arch: Vec<Architecture>) -> Repository {
std::fs::create_dir_all(&path).unwrap();
for arch in arch {
@@ -32,7 +31,13 @@ impl Repository {
std::fs::create_dir_all(np).unwrap();
}
Repository::new(name).unwrap()
Repository::new_at(path.display().to_string().as_str())
}
/// Create a new package repository with architectures from `arch`
pub fn create(name: &str, arch: Vec<Architecture>) -> Repository {
let path = PathBuf::from("./data").join(name);
Self::create_at(path, arch)
}
}
@@ -42,18 +47,30 @@ impl Repository {
if PathBuf::from("./data").join(name).exists() {
Some(Repository {
name: name.to_string(),
directory: PathBuf::from("./data").join(name),
})
} else {
None
}
}
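/// Open an existing repository rooted at `path`; the repository name is
/// derived from the final path component.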
pub fn new_at(path: &str) -> Self {
Repository {
name: std::path::Path::new(path)
.file_name()
.unwrap()
.to_str()
.unwrap()
.to_string(),
directory: std::path::PathBuf::from(path),
}
}
/// Get a list of architectures for this repository
pub fn arch(&self) -> Vec<Architecture> {
let dir_path = PathBuf::from("./data").join(&self.name);
let mut arch = vec![];
if let Ok(entries) = std::fs::read_dir(dir_path) {
if let Ok(entries) = std::fs::read_dir(&self.directory) {
for entry in entries.filter_map(Result::ok) {
let file_name = entry.file_name().into_string().unwrap_or_default();
if let Some(repo_arch) = Architecture::parse(&file_name) {
@@ -67,9 +84,7 @@ impl Repository {
/// Get the base path for the repository with `arch`.
pub fn base_path(&self, arch: Architecture) -> PathBuf {
PathBuf::from("./data")
.join(&self.name)
.join(arch.to_string())
self.directory.join(arch.to_string())
}
/// Get the `.db.tar.gz` content for the repository of `arch`
@@ -81,6 +96,14 @@ impl Repository {
.ok()
}
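/// Path to the repository database (`<name>.db.tar.gz`) for `arch`.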
pub fn db_path(&self, arch: Architecture) -> String {
self.base_path(arch)
.join(format!("{}.db.tar.gz", self.name))
.to_str()
.unwrap()
.to_string()
}
/// Get the `.db.tar.gz.sig` content for the repository of `arch`
pub fn sig_content(&self, arch: Architecture) -> Option<Vec<u8>> {
std::fs::read(