reformat and stuff ig
parent 70739d7dcf
commit f51099b911
7 changed files with 279 additions and 118 deletions
src/main.rs: 162 lines changed
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,4 +1,5 @@
 #![feature(let_chains)]
+#![feature(extend_one)]
 
 use std::path::Path;
 use std::sync::{
@@ -6,92 +7,39 @@ use std::sync::{
     atomic::{AtomicUsize, Ordering},
 };
 
-use clap::Parser;
+use anyhow::Result;
+use clap::{Parser, Subcommand};
 use log::{debug, trace};
 use serde::{Deserialize, Serialize};
 use tokio::process::Command;
 use tokio::sync::{Semaphore, mpsc};
 
-const UPSTREAM_CACHES: &[&str] = &["https://cache.nixos.org"];
+use nixcp::NixCp;
 
-// nix path-info --derivation --json
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-struct PathInfo {
-    ca: String,
-    nar_hash: String,
-    nar_size: u32,
-    path: String,
-    references: Vec<String>,
-    registration_time: u32,
-    valid: bool,
-}
+mod cli;
+mod nixcp;
+mod path_info;
 
-impl PathInfo {
-    // find derivations related to package
-    async fn from_package(package: &str, recursive: bool) -> Vec<Self> {
-        let mut args = vec!["path-info", "--derivation", "--json"];
-        if recursive {
-            args.push("--recursive");
-        }
-        let path_infos = Command::new("nix")
-            .args(args)
-            .arg(package)
-            .output()
-            .await
-            .expect("path-info failed");
-
-        let path_infos: Vec<PathInfo> = serde_json::from_slice(&path_infos.stdout)
-            .expect("no derivations found for this package");
-        debug!("PathInfo's from nix path-info: {:#?}", path_infos);
-        path_infos
-    }
-
-    // find store paths related to derivation
-    async fn get_store_paths(&self) -> Vec<String> {
-        let mut store_paths: Vec<String> = Vec::new();
-        let nix_store_cmd = Command::new("nix-store")
-            .arg("--query")
-            .arg("--requisites")
-            .arg("--include-outputs")
-            .arg(&self.path)
-            .output()
-            .await
-            .expect("nix-store cmd failed");
-
-        let nix_store_out = String::from_utf8(nix_store_cmd.stdout).unwrap();
-        for store_path in nix_store_out.split_whitespace().map(ToString::to_string) {
-            store_paths.push(store_path);
-        }
-        store_paths
-    }
-}
 
-#[derive(Parser)]
-#[command(version, about, long_about = None)]
+#[derive(Parser, Debug)]
+#[command(version, name = "nixcp")]
 struct Cli {
-    /// Package to upload to the binary cache
-    package: String,
+    #[command(subcommand)]
+    command: Commands,
 
     /// Address of the binary cache (passed to nix copy --to)
     #[arg(long, value_name = "BINARY CACHE")]
     to: String,
 
     /// Upstream cache to check against. Can be specified multiple times.
-    /// cache.nixos.org is always included
-    #[arg(long, short)]
-    upstream_cache: Vec<String>,
-
-    /// Whether to pass --recursive to nix path-info. Can queue a huge number of paths to upload
-    #[arg(long, short, default_value_t = false)]
-    recursive: bool,
+    /// cache.nixos.org is always included (unless --no-nixos-cache is passed)
+    #[arg(long = "upstream-cache", short)]
+    upstream_caches: Vec<String>,
 
     /// Concurrent upstream cache checkers
    #[arg(long, default_value_t = 32)]
     upstream_checker_concurrency: u8,
 
     /// Concurrent uploaders
-    #[arg(long, default_value_t = 16)]
+    #[arg(long, default_value_t = 4)]
     uploader_concurrency: u8,
 
     /// Concurrent nix-store commands to run
@@ -99,67 +47,45 @@ struct Cli {
     nix_store_concurrency: u8,
 }
 
+#[derive(Debug, Subcommand)]
+enum Commands {
+    Push {
+        /// Package or store path to upload
+        /// e.g. nixpkgs#hello or /nix/store/y4qpcibkj767szhjb58i2sidmz8m24hb-hello-2.12.1
+        package: String,
+    },
+}
+
 #[tokio::main]
-async fn main() {
+async fn main() -> Result<()> {
     env_logger::init();
     let cli = Cli::parse();
-    let package = &cli.package;
-    let binary_cache = cli.to;
-    let mut upstream_caches = cli.upstream_cache;
-    for upstream in UPSTREAM_CACHES {
-        upstream_caches.push(upstream.to_string());
+    let mut nixcp = NixCp::new();
+    nixcp.add_upstreams(&cli.upstream_caches)?;
+
+    match &cli.command {
+        Commands::Push { package } => {
+            nixcp.paths_from_package(package).await?;
+        }
     }
-    debug!("package: {}", package);
-    debug!("binary cache: {}", binary_cache);
-    debug!("upstream caches: {:#?}", upstream_caches);
-
-    println!("querying nix path-info");
-    let derivations = PathInfo::from_package(package, cli.recursive).await;
-    println!("got {} derivations", derivations.len());
+    Ok(())
 
-    println!("querying nix-store");
-    let mut handles = Vec::new();
-    let concurrency = Arc::new(Semaphore::new(cli.nix_store_concurrency.into()));
-    let store_paths = Arc::new(RwLock::new(Vec::new()));
+    /*
+    let (cacheable_tx, mut cacheable_rx) = mpsc::channel(cli.uploader_concurrency.into());
 
-    for derivation in derivations {
-        let store_paths = Arc::clone(&store_paths);
-        let permit = Arc::clone(&concurrency);
+    println!("spawning check_upstream");
+
+    println!("spawning uploader");
     handles.push(tokio::spawn(async move {
-        let _permit = permit.acquire_owned().await.unwrap();
-        let paths = derivation.get_store_paths().await;
-        store_paths.write().unwrap().extend(paths);
+        uploader(&mut cacheable_rx, binary_cache, cli.uploader_concurrency).await;
     }));
-    }
-    // resolve store paths for all derivations before we move on
     for handle in handles {
         handle.await.unwrap();
     }
-    println!("got {} store paths", store_paths.read().unwrap().len());
-
-    let (cacheable_tx, mut cacheable_rx) = mpsc::channel(cli.uploader_concurrency.into());
-
-    println!("spawning check_upstream");
-    handles = Vec::new();
-    handles.push(tokio::spawn(async move {
-        check_upstream(
-            store_paths,
-            cacheable_tx,
-            cli.upstream_checker_concurrency,
-            Arc::new(upstream_caches),
-        )
-        .await;
-    }));
-
-    println!("spawning uploader");
-    handles.push(tokio::spawn(async move {
-        uploader(&mut cacheable_rx, binary_cache, cli.uploader_concurrency).await;
-    }));
-
-    // make sure all threads are done
-    for handle in handles {
-        handle.await.unwrap();
-    }
+    // make sure all threads are done
+    for handle in handles {
+        handle.await.unwrap();
+    }
+    */
 }
 
 // filter out store paths that exist in upstream caches
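The refactor routes everything through the new `nixcp` module, whose diff isn't loaded on this page. Below is a minimal sketch of the interface `src/nixcp.rs` would need to expose for the new `main()` to compile. The signatures are inferred from the call sites above; the struct layout, validation logic, and method bodies are assumptions, not code from this commit:

```rust
// A minimal sketch, assuming this is roughly what the (unshown) nixcp module
// exposes. Signatures are inferred from the call sites in the new main();
// the field layout and bodies are hypothetical, not the code from this commit.
use anyhow::Result;

pub struct NixCp {
    // cache.nixos.org is included by default, per the CLI help text
    upstream_caches: Vec<String>,
}

impl NixCp {
    pub fn new() -> Self {
        Self {
            upstream_caches: vec!["https://cache.nixos.org".into()],
        }
    }

    /// Record extra upstream caches given via --upstream-cache.
    /// Returning Result is why main() calls this with the `?` operator;
    /// the URL check here is a hypothetical example of what could fail.
    pub fn add_upstreams(&mut self, upstreams: &[String]) -> Result<()> {
        for upstream in upstreams {
            if !upstream.starts_with("http://") && !upstream.starts_with("https://") {
                anyhow::bail!("invalid upstream cache: {upstream}");
            }
            self.upstream_caches.push(upstream.clone());
        }
        Ok(())
    }

    /// Resolve store paths for a package spec like `nixpkgs#hello`,
    /// replacing the old PathInfo::from_package flow that this diff
    /// removes from main.rs (presumably moved into the path_info module).
    pub async fn paths_from_package(&mut self, package: &str) -> Result<()> {
        todo!("moved out of main.rs in this commit; see src/path_info.rs")
    }
}
```

Under the new subcommand layout an invocation would look something like `nixcp --to https://example.cache push nixpkgs#hello` (the cache address here is made up); since `--to` is declared on the top-level `Cli` rather than marked global, clap expects it before the `push` subcommand.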