make cli and other nice things

parent 92f7edfba4
commit aa6b94949b
3 changed files with 118 additions and 18 deletions

Cargo.lock (generated): 53 lines changed
@@ -142,6 +142,46 @@ version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
+[[package]]
+name = "clap"
+version = "4.5.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e958897981290da2a852763fe9cdb89cd36977a5d729023127095fa94d95e2ff"
+dependencies = [
+ "clap_builder",
+ "clap_derive",
+]
+
+[[package]]
+name = "clap_builder"
+version = "4.5.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "83b0f35019843db2160b5bb19ae09b4e6411ac33fc6a712003c33e03090e2489"
+dependencies = [
+ "anstream",
+ "anstyle",
+ "clap_lex",
+ "strsim",
+]
+
+[[package]]
+name = "clap_derive"
+version = "4.5.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7"
+dependencies = [
+ "heck",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "clap_lex"
+version = "0.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
+
 [[package]]
 name = "colorchoice"
 version = "1.0.3"

@@ -352,6 +392,12 @@ version = "0.15.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
 
+[[package]]
+name = "heck"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
+
 [[package]]
 name = "http"
 version = "1.3.1"

@@ -753,6 +799,7 @@ dependencies = [
 name = "nixcp"
 version = "0.1.0"
 dependencies = [
+ "clap",
  "env_logger",
  "log",
  "reqwest",

@@ -1200,6 +1247,12 @@ version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
 
+[[package]]
+name = "strsim"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
+
 [[package]]
 name = "subtle"
 version = "2.6.1"

Cargo.toml: 1 line changed
@@ -4,6 +4,7 @@ version = "0.1.0"
 edition = "2024"
 
 [dependencies]
+clap = { version = "4.5.34", features = ["derive"] }
 env_logger = "0.11.7"
 log = "0.4.27"
 reqwest = "0.12.15"

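For context (not part of this commit): the "derive" feature enabled above is what allows clap to turn an annotated struct into a parser. A minimal, hypothetical sketch of how that mapping works; the Demo struct and all values below are illustrative only, and long flag names default to kebab-case versions of the field names:

// Illustrative sketch only; not part of the commit.
use clap::Parser;

#[derive(Parser, Debug)]
struct Demo {
    /// positional argument
    package: String,

    /// field name maps to --upstream-cache (and -u); may be passed multiple times
    #[arg(long, short)]
    upstream_cache: Vec<String>,

    /// option with a default value
    #[arg(long, default_value_t = 10)]
    uploader_concurrency: u8,
}

fn main() {
    // equivalent to: demo nixpkgs#hello -u https://example.cache --uploader-concurrency 4
    let demo = Demo::parse_from([
        "demo",
        "nixpkgs#hello",
        "-u", "https://example.cache",
        "--uploader-concurrency", "4",
    ]);
    println!("{:?}", demo);
}
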
src/main.rs: 82 lines changed
@@ -1,18 +1,18 @@
 #![feature(let_chains)]
 
-use std::process::{Command, Stdio};
+use std::process::Command;
 use std::sync::mpsc;
-use std::{env, path::Path};
+use std::path::Path;
+use std::sync::{Mutex, Arc, atomic::{AtomicUsize, Ordering}};
 
 use log::{debug, trace};
 use serde::{Deserialize, Serialize};
 use serde_json;
 use tokio::sync::Semaphore;
+use clap::Parser;
 
 const UPSTREAM_CACHES: &'static [&'static str] = &[
     "https://cache.nixos.org",
-    "https://nix-community.cachix.org",
-    "https://nixcache.cy7.sh",
 ];
 
 // nix path-info --derivation --json

@@ -63,12 +63,43 @@ impl PathInfo {
     }
 }
 
+#[derive(Parser)]
+#[command(version, about, long_about = None)]
+struct Cli {
+    /// Package to upload to the binary cache
+    package: String,
+
+    /// Address of the binary cache (passed to nix copy --to)
+    #[arg(long, value_name = "BINARY CACHE")]
+    to: String,
+
+    /// Upstream cache to check against. Can be specified multiple times.
+    /// cache.nixos.org is always included
+    #[arg(long, short)]
+    upstream_cache: Vec<String>,
+
+    /// Concurrent upstream cache checkers
+    #[arg(long, default_value_t = 50)]
+    upstream_checker_concurrency: u8,
+
+    /// Concurrent uploaders
+    #[arg(long, default_value_t = 10)]
+    uploader_concurrency: u8,
+}
+
 #[tokio::main]
 async fn main() {
     env_logger::init();
-    let args: Vec<String> = env::args().collect();
-    let package = &args[1];
+    let cli = Cli::parse();
+    let package = &cli.package;
+    let binary_cache = cli.to;
+    let mut upstream_caches = cli.upstream_cache;
+    for upstream in UPSTREAM_CACHES {
+        upstream_caches.push(upstream.to_string());
+    }
     debug!("package: {}", package);
+    debug!("binary cache: {}", binary_cache);
+    debug!("upstream caches: {:#?}", upstream_caches);
 
     println!("querying nix path-info");
     let path_infos = PathInfo::from_package(package);

@@ -78,15 +109,15 @@ async fn main() {
     let (cacheable_tx, cacheable_rx) = mpsc::channel();
 
     let mut handles = Vec::new();
 
     println!("spawning check_upstream");
     handles.push(tokio::spawn(async move {
-        check_upstream(store_paths, cacheable_tx).await;
+        check_upstream(store_paths, cacheable_tx, cli.upstream_checker_concurrency, upstream_caches).await;
     }));
 
     println!("spawning uploader");
     handles.push(tokio::spawn(async move {
-        uploader(cacheable_rx).await;
+        uploader(cacheable_rx, binary_cache, cli.uploader_concurrency).await;
     }));
 
     // make sure all threads are done

@@ -96,8 +127,8 @@ async fn main() {
 }
 
 // filter out store paths that exist in upstream caches
-async fn check_upstream(store_paths: Vec<String>, cacheable_tx: mpsc::Sender<String>) {
-    let concurrent = Semaphore::new(50);
+async fn check_upstream(store_paths: Vec<String>, cacheable_tx: mpsc::Sender<String>, concurrency: u8, upstream_caches: Vec<String>) {
+    let concurrent = Semaphore::new(concurrency.into());
     for store_path in store_paths {
         let _ = concurrent.acquire().await.unwrap();
         let tx = cacheable_tx.clone();

@@ -122,7 +153,7 @@ async fn check_upstream(store_paths: Vec<String>, cacheable_tx: mpsc::Sender<String>) {
                 .map(|x| x.status());
 
             if let Ok(res_status) = res_status && res_status.is_success() {
-                println!("{} was a hit upstream: {}", store_path, upstream);
+                debug!("{} was a hit upstream: {}", store_path, upstream);
                 hit = true;
                 break;
             }

@@ -135,26 +166,32 @@ async fn check_upstream(store_paths: Vec<String>, cacheable_tx: mpsc::Sender<String>) {
     }
 }
 
-async fn uploader(cacheable_rx: mpsc::Receiver<String>) {
-    let mut count = 0;
-    let concurrent = Semaphore::new(10);
+async fn uploader(cacheable_rx: mpsc::Receiver<String>, binary_cache: String, concurrency: u8) {
+    let upload_count = Arc::new(AtomicUsize::new(0));
+    let failures: Arc<Mutex<Vec<String>>> = Arc::new(Mutex::new(Vec::new()));
+    let concurrent = Semaphore::new(concurrency.into());
     let mut handles = Vec::new();
     loop {
         if let Ok(path_to_upload) = cacheable_rx.recv() {
             let _ = concurrent.acquire().await.unwrap();
+            let failures = Arc::clone(&failures);
+            let binary_cache = binary_cache.clone();
+            let upload_count = Arc::clone(&upload_count);
 
             handles.push(tokio::spawn(async move {
                 println!("uploading: {}", path_to_upload);
                 if Command::new("nix")
                     .arg("copy")
                     .arg("--to")
-                    .arg("s3://nixcache?endpoint=s3.cy7.sh&secret-key=/home/yt/cache-priv-key.pem")
+                    .arg(&binary_cache.to_string())
                     .arg(&path_to_upload)
                     .output()
                     .is_err()
                 {
                     println!("WARN: upload failed: {}", path_to_upload);
+                    failures.lock().unwrap().push(path_to_upload);
                 } else {
-                    count += 1;
+                    upload_count.fetch_add(1, Ordering::Relaxed);
                 }
             }));
         } else {

@@ -162,7 +199,16 @@ async fn uploader(cacheable_rx: mpsc::Receiver<String>) {
             for handle in handles {
                 handle.await.unwrap();
             }
-            println!("uploaded {} paths", count);
+            println!("uploaded {} paths", upload_count.load(Ordering::Relaxed));
 
+            let failures = failures.lock().unwrap();
+            if !failures.is_empty() {
+                println!("failed to upload these paths: ");
+                for failure in failures.iter() {
+                    print!("{}", failure);
+                }
+                println!();
+            }
             break;
         }
     }
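Aside, not part of the diff: the reworked uploader combines three shared-state pieces, a Semaphore to bound in-flight work, an AtomicUsize for the success count, and an Arc<Mutex<Vec<String>>> for the failure list. A minimal self-contained sketch of that pattern under made-up names follows; it holds each permit inside the spawned task via acquire_owned, which is one way to keep the limit effective across tasks:

// Standalone sketch of the semaphore + atomic counter + shared failure list pattern.
use std::sync::{Arc, Mutex, atomic::{AtomicUsize, Ordering}};
use tokio::sync::Semaphore;

#[tokio::main]
async fn main() {
    let concurrent = Arc::new(Semaphore::new(10)); // at most 10 tasks in flight
    let ok_count = Arc::new(AtomicUsize::new(0));
    let failures: Arc<Mutex<Vec<String>>> = Arc::new(Mutex::new(Vec::new()));

    let mut handles = Vec::new();
    for item in (0..100).map(|i| format!("item-{i}")) {
        // the owned permit moves into the task and is released when the task finishes
        let permit = Arc::clone(&concurrent).acquire_owned().await.unwrap();
        let ok_count = Arc::clone(&ok_count);
        let failures = Arc::clone(&failures);
        handles.push(tokio::spawn(async move {
            let _permit = permit;
            // stand-in for the real work (e.g. an upload); some items "fail"
            if item.ends_with('7') {
                failures.lock().unwrap().push(item);
            } else {
                ok_count.fetch_add(1, Ordering::Relaxed);
            }
        }));
    }
    for handle in handles {
        handle.await.unwrap();
    }
    println!("ok: {}", ok_count.load(Ordering::Relaxed));
    println!("failed: {:?}", failures.lock().unwrap());
}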