commit 681ee5e826 (parent e58cf2bbd0)
Author: cy
Date: 2025-04-13 14:52:12 -04:00
Signed by: cy (SSH key fingerprint: SHA256:o/geVWV4om1QhUSkKvDQeW/eAihwnjyXkqMwrVdbuts)
6 changed files with 607 additions and 27 deletions

src/main.rs

@@ -9,6 +9,7 @@ use nixcp::NixCp
mod cli;
mod nixcp;
mod path_info;
mod uploader;
#[derive(Parser, Debug)]
#[command(version, name = "nixcp")]
@@ -21,21 +22,9 @@ struct Cli {
to: String,
/// Upstream cache to check against. Can be specified multiple times.
/// cache.nixos.org is always included (unless --no-nixos-cache is passed)
/// cache.nixos.org is always included
#[arg(long = "upstream-cache", short)]
upstream_caches: Vec<String>,
/// Concurrent upstream cache checkers
#[arg(long, default_value_t = 32)]
upstream_checker_concurrency: u8,
/// Concurrent uploaders
#[arg(long, default_value_t = 4)]
uploader_concurrency: u8,
/// Concurrent nix-store commands to run
#[arg(long, default_value_t = 32)]
nix_store_concurrency: u8,
}
#[derive(Debug, Subcommand)]
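Aside: the Vec<String> type on upstream_caches is what lets --upstream-cache be passed more than once. A standalone sketch of that clap pattern (the Demo struct and URLs are illustrative, not the project's actual Cli):

    use clap::Parser;

    /// Illustrative only: mirrors the upstream_caches field above.
    #[derive(Parser, Debug)]
    struct Demo {
        /// Upstream cache to check against. Can be specified multiple times.
        #[arg(long = "upstream-cache", short)]
        upstream_caches: Vec<String>,
    }

    fn main() {
        let demo = Demo::parse_from([
            "demo",
            "--upstream-cache", "https://cache.example.org",
            "-u", "https://other.example.org", // short flag derived from the field name
        ]);
        assert_eq!(demo.upstream_caches.len(), 2);
    }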

src/nixcp.rs

@@ -72,7 +72,7 @@ impl NixCp {
let _permit = permits.acquire().await.unwrap();
if !path.check_upstream_hit(upstream_caches.as_slice()).await {
tx.send(path.to_string()).await.unwrap();
tx.send(path.absolute_path()).await.unwrap();
}
})
});
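The channel now carries the full /nix/store/... string returned by PathInfo::absolute_path() (added in src/path_info.rs below) instead of relying on the struct's removed ToString impl. A small sketch of the distinction, assuming nix-compat's StorePath::from_bytes parser and that Display prints only the "<digest>-<name>" basename while to_absolute_path() prepends the store directory (the path below is made up):

    use nix_compat::store_path::StorePath;

    fn main() {
        // Made-up digest and name in the usual "<digest>-<name>" basename form.
        let path: StorePath<String> =
            StorePath::from_bytes(b"7h2sm9f1z9xqqbvnkkkf0sm5pjxfqsbg-hello-2.12.1")
                .expect("valid store path");

        // Display gives just the basename...
        println!("{path}");
        // ...while to_absolute_path() (what absolute_path() wraps) includes /nix/store/.
        println!("{}", path.to_absolute_path());
    }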

src/path_info.rs

@@ -2,6 +2,8 @@ use std::{collections::HashSet, path::Path};
use anyhow::{Context, Result};
use log::trace;
use nix_compat::nixhash::CAHash;
use nix_compat::store_path::StorePath;
use regex::Regex;
use serde::{Deserialize, Serialize};
use tokio::process::Command;
@@ -11,9 +13,11 @@ use url::Url;
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PathInfo {
deriver: String,
path: String,
pub deriver: StorePath<String>,
pub path: StorePath<String>,
signatures: Vec<String>,
pub references: Vec<StorePath<String>>,
pub ca: Option<CAHash>,
}
impl PathInfo {
/// get PathInfo for a package or a store path
@@ -34,7 +38,7 @@ impl PathInfo {
.arg("--query")
.arg("--requisites")
.arg("--include-outputs")
.arg(&self.deriver)
.arg(self.deriver.to_string())
.output()
.await
.expect("nix-store cmd failed");
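// Note: `nix-store --query --requisites --include-outputs <drv>` prints the
// derivation's closure together with its output paths, one absolute
// /nix/store path per line on stdout; the rest of this function (outside the
// hunk) turns that output into the returned closure.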
@@ -48,10 +52,6 @@ impl PathInfo {
Ok(closure)
}
pub fn get_path(&self) -> &Path {
&Path::new(&self.path)
}
/// checks if the path is signed by any upstream. if it is, we assume a cache hit.
/// the name of the cache in the signature does not have to be the domain of the cache.
/// in fact, it can be any random string. but, most often it is, and this saves us
@@ -81,8 +81,8 @@ impl PathInfo {
}
pub async fn check_upstream_hit(&self, upstreams: &[Url]) -> bool {
let basename = self.get_path().file_name().unwrap().to_str().unwrap();
let hash = basename.split_once("-").unwrap().0;
// nixbase32-encode the raw digest to recover the basename hash upstreams use
let hash = nix_compat::nixbase32::encode(self.path.digest());
for upstream in upstreams {
let upstream = upstream
@@ -101,14 +101,13 @@ impl PathInfo {
}
false
}
}
impl ToString for PathInfo {
fn to_string(&self) -> String {
self.path.clone()
pub fn absolute_path(&self) -> String {
self.path.to_absolute_path()
}
}
/*
#[cfg(test)]
mod tests {
use super::*;
@@ -159,3 +158,4 @@ mod tests {
);
}
}
*/
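The upstream check above ultimately asks each cache for "<nixbase32 digest>.narinfo". A minimal sketch of that lookup URL, assuming the nix_compat::nixbase32::encode helper used in the fix above (cache URL and store path are illustrative):

    use nix_compat::{nixbase32, store_path::StorePath};

    fn narinfo_url(upstream: &str, path: &StorePath<String>) -> String {
        // Binary caches serve path metadata at "<cache>/<nixbase32-digest>.narinfo".
        format!("{upstream}/{}.narinfo", nixbase32::encode(path.digest()))
    }

    fn main() {
        let path: StorePath<String> =
            StorePath::from_bytes(b"7h2sm9f1z9xqqbvnkkkf0sm5pjxfqsbg-hello-2.12.1")
                .expect("valid store path");
        // Prints e.g. "https://cache.nixos.org/7h2sm9f1z9xqqbvnkkkf0sm5pjxfqsbg.narinfo"
        println!("{}", narinfo_url("https://cache.nixos.org", &path));
    }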

src/uploader.rs (new file)

@@ -0,0 +1,79 @@
use anyhow::Result;
use async_compression::{Level, tokio::bufread::ZstdEncoder};
use ed25519_dalek;
use nix_compat::{
narinfo::{self, NarInfo},
nixbase32,
store_path::StorePath,
};
use sha2::{Digest, Sha256};
use std::fs;
use tokio::{io::AsyncReadExt, process::Command};
use crate::path_info::PathInfo;
pub struct Uploader {
signing_key: narinfo::SigningKey<ed25519_dalek::SigningKey>,
path: PathInfo,
compression: Option<String>,
}
impl Uploader {
pub fn new(key_file: &str, path: PathInfo) -> Result<Self> {
let key = fs::read_to_string(key_file)?;
let signing_key = narinfo::parse_keypair(key.as_str())?.0;
Ok(Self {
signing_key,
path,
// TODO: support other algorithms
compression: Some("zstd".to_string()),
})
}
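// Note: the key file is expected to hold a Nix secret signing key in the usual
// "<name>:<base64>" form (as produced by `nix-store --generate-binary-cache-key`);
// parse_keypair yields a (signing key, verifying key) pair and only the signing
// half is kept here.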
pub async fn make_nar(&self) -> Result<Vec<u8>> {
Ok(Command::new("nix")
.arg("nar")
.arg("dump-path")
.arg(self.path.absolute_path())
.output()
.await?
.stdout)
}
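// Note: `nix nar dump-path <store path>` writes the NAR serialisation of the
// path to stdout; .output() buffers the whole archive in memory before it is
// hashed and compressed below.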
pub fn narinfo_from_nar(&self, nar: &[u8]) -> Result<NarInfo> {
let mut hasher = Sha256::new();
hasher.update(nar);
let nar_hash: [u8; 32] = hasher.finalize().into();
let nar_info = NarInfo {
flags: narinfo::Flags::empty(),
store_path: self.path.path.as_ref(),
nar_hash,
nar_size: nar.len() as u64,
references: self.path.references.iter().map(StorePath::as_ref).collect(),
signatures: Vec::new(),
ca: self.path.ca.clone(),
system: None,
deriver: Some(self.path.deriver.as_ref()),
compression: self.compression.as_ref().map(String::as_str),
file_hash: None,
file_size: None,
url: "",
};
Ok(nar_info)
}
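// Note: url, file_hash, file_size and signatures are left empty/default here;
// they can only be filled in once the NAR has been compressed (compress_nar /
// nar_url below) and the narinfo has been signed with signing_key, presumably
// by calling code outside this file.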
fn nar_url(&self, compressed_nar_hash: &[u8]) -> String {
let compressed_nar_hash = nixbase32::encode(compressed_nar_hash);
format!("nar/{compressed_nar_hash}.nar.zst")
}
async fn compress_nar(&self, nar: &[u8]) -> Vec<u8> {
let mut encoder = ZstdEncoder::with_quality(nar, Level::Default);
let mut compressed = Vec::with_capacity(nar.len());
encoder
.read_to_end(&mut compressed)
.await
.expect("should compress just fine");
compressed
}
}
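For orientation, a rough sketch of how these helpers could be chained by the upload step, which is not visible in the hunks above; put_object is a hypothetical stand-in for whatever storage client the real code uses, and signing/serialisation of the narinfo is elided:

    impl Uploader {
        // Hedged sketch only; reuses the imports already at the top of this file.
        pub async fn upload_sketch(&self) -> Result<()> {
            let nar = self.make_nar().await?; // uncompressed NAR bytes
            let compressed = self.compress_nar(&nar).await;

            // Hash and size of the *compressed* file, which also determine its object key.
            let mut hasher = Sha256::new();
            hasher.update(&compressed);
            let file_hash: [u8; 32] = hasher.finalize().into();
            let url = self.nar_url(&file_hash);

            let mut nar_info = self.narinfo_from_nar(&nar)?;
            nar_info.file_hash = Some(file_hash);
            nar_info.file_size = Some(compressed.len() as u64);
            nar_info.url = &url;

            // put_object(&url, compressed).await?;                          // hypothetical
            // put_object(&narinfo_object_key, nar_info.to_string()).await?; // after signing
            Ok(())
        }
    }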