Compare commits

..

3 commits

Author  SHA1        Message                           Date
cy      b0bcc21fc0  attic: try prefetching 16 chunks  2025-03-17 21:48:16 -04:00
cy      f6c6f24083  flake update                      2025-03-17 21:47:21 -04:00
cy      fba0d4120d  workflow: always() cache          2025-03-17 21:38:31 -04:00
3 changed files with 14 additions and 5 deletions

View file

@@ -63,13 +63,17 @@ jobs:
         run: |
           nix profile install github:zhaofengli/attic
           attic login cy7 https://cache.cy7.sh "$ATTIC_TOKEN"
-      - name: build and cache
+      - name: build
         run: |
           package=".#nixosConfigurations."${{ matrix.machine }}".config.system.build.toplevel"
           nix build -L "$package"
+      - name: cache
+        if: always()
+        run: |
+          package=".#nixosConfigurations."${{ matrix.machine }}".config.system.build.toplevel"
           derivation="$(nix path-info --derivation "$package")"
           cache="$(nix-store --query --requisites --include-outputs "$derivation")"
-          attic push main --stdin <<< "$cache"
+          xargs attic push main <<< "$cache"
   build-homes:
     strategy:
       fail-fast: false
@@ -119,10 +123,14 @@ jobs:
         run: |
           nix profile install github:zhaofengli/attic
           attic login cy7 https://cache.cy7.sh "$ATTIC_TOKEN"
-      - name: build and cache
+      - name: build
         run: |
           package=".#homeConfigurations."${{ matrix.home }}".activationPackage"
           nix build -L "$package"
+      - name: cache
+        if: always()
+        run: |
+          package=".#homeConfigurations."${{ matrix.home }}".activationPackage"
           derivation="$(nix path-info --derivation "$package")"
           cache="$(nix-store --query --requisites --include-outputs "$derivation")"
           attic push main --stdin <<< "$cache"
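
The new cache step can be reproduced outside CI. The sketch below is a minimal local version under a few assumptions: attic is installed and already logged in to the same `main` cache on cache.cy7.sh, and a made-up machine name stands in for `${{ matrix.machine }}`. It walks the derivation's closure and pushes whatever store paths exist; because the workflow runs this step under `if: always()`, dependencies that were realised before a failed build still get pushed.

    #!/usr/bin/env bash
    set -euo pipefail

    # Flake output to cache -- same attribute the workflow builds ("myhost" is a placeholder).
    package=".#nixosConfigurations.myhost.config.system.build.toplevel"

    # Resolve the .drv behind the flake output; this only needs evaluation, not a finished build.
    derivation="$(nix path-info --derivation "$package")"

    # Closure of the derivation, plus any of its output paths that already exist in the store.
    cache="$(nix-store --query --requisites --include-outputs "$derivation")"

    # attic push takes store paths as arguments, so xargs turns the newline-separated
    # list into arguments for "attic push main".
    xargs attic push main <<< "$cache"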

View file

@@ -42,10 +42,11 @@ jobs:
           attic login cy7 https://cache.cy7.sh "$ATTIC_TOKEN"
       - run: nix build -L ${{ matrix.package }}
       - name: cache result
+        if: always()
         run: |
           derivation="$(nix path-info --derivation "${{ matrix.package }}")"
           cache="$(nix-store --query --requisites --include-outputs "$derivation")"
-          attic push main --stdin <<< "$cache"
+          xargs attic push main <<< "$cache"
       - name: prepare tarball to upload
         run: nix run github:nixos/nixpkgs#gnutar hcvf result.tar result
       - name: upload result

View file

@@ -7,7 +7,7 @@ index 02e4857..71eeee8 100644
          // TODO: Make num_prefetch configurable
          // The ideal size depends on the average chunk size
 -        let merged = merge_chunks(chunks, streamer, storage, 2).map_err(|e| {
-+        let merged = merge_chunks(chunks, streamer, storage, 32).map_err(|e| {
++        let merged = merge_chunks(chunks, streamer, storage, 16).map_err(|e| {
              tracing::error!(%e, "Stream error");
              e
          });
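
This third file is a patch carried against attic: per the TODO comment, the last argument to merge_chunks is the number of chunks fetched ahead while a NAR is streamed, and the carried patch now raises upstream's 2 to 16 rather than the 32 it tried before. As a rough sketch of that technique only, not attic's actual merge_chunks, a buffered futures stream keeps up to 16 downloads in flight while still yielding chunks in order (the closure bodies are placeholders; it needs the futures and tokio crates):

    use futures::stream::{self, StreamExt};

    #[tokio::main]
    async fn main() {
        let chunk_ids = 0..100u32;

        // Start up to 16 "downloads" concurrently, but yield results in their original
        // order -- the role the prefetch count plays while merging chunks into one stream.
        let mut chunks = stream::iter(chunk_ids)
            .map(|id| async move {
                // Placeholder for fetching chunk `id` from storage.
                tokio::task::yield_now().await;
                format!("chunk {id}")
            })
            .buffered(16);

        while let Some(chunk) = chunks.next().await {
            // Placeholder for writing the chunk into the merged output.
            let _ = chunk;
        }
    }

buffered preserves order, which matters when chunks are concatenated into a single response; buffer_unordered would start the same number of downloads but deliver them out of order.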