style: Switch to nixfmt from nixpkgs-fmt

Most of the ecosystem has moved to this formatter,
and many people have configured their editors to autoformat with it.

Closes: https://git.snix.dev/snix/snix/issues/62
Change-Id: Icf39e7836c91fc2ae49fbe22a40a639105bfb0bd
Reviewed-on: https://cl.snix.dev/c/snix/+/30671
Reviewed-by: Florian Klink <flokli@flokli.de>
Tested-by: besadii
Autosubmit: Ilan Joselevich <personal@ilanjoselevich.com>
Ilan Joselevich <personal@ilanjoselevich.com> 2025-08-09 21:08:41 +02:00
parent 3443e6bd08
commit 91d02d8c84
136 changed files with 39952 additions and 11007 deletions
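The churn below is mechanical. As a minimal illustration (mirroring the first
hunks of this diff, not additional code from the commit), the most visible
difference between the two formatters is how multi-line attribute-set
patterns are laid out:

    # nixpkgs-fmt (before)
    { depot
    , pkgs
    , ...
    }:

    # nixfmt (after)
    {
      depot,
      pkgs,
      ...
    }: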


@@ -1,6 +1,11 @@
 # See README.md
-{ depot ? import ../. { }, ... }:
+{
+  depot ? import ../. { },
+  ...
+}:
 
-depot.third_party.nixpkgs.extend (_: _: {
-  tvl = depot;
-})
+depot.third_party.nixpkgs.extend (
+  _: _: {
+    tvl = depot;
+  }
+)


@@ -1,6 +1,7 @@
-{ depot
-, pkgs
-, ...
+{
+  depot,
+  pkgs,
+  ...
 }:
 
 let
@@ -13,27 +14,33 @@ let
   '';
 
   # clickhouse has a very odd AWS config concept.
   # Configure it to be a bit more sane.
-  clickhouseLocalFixedAWS = pkgs.runCommand "clickhouse-local-fixed"
-    {
-      nativeBuildInputs = [ pkgs.makeWrapper ];
-    } ''
-    mkdir -p $out/bin
-    makeWrapper ${pkgs.clickhouse}/bin/clickhouse-local $out/bin/clickhouse-local \
-      --append-flags "-C ${clickhouseConfigAWS}"
-  '';
+  clickhouseLocalFixedAWS =
+    pkgs.runCommand "clickhouse-local-fixed"
+      {
+        nativeBuildInputs = [ pkgs.makeWrapper ];
+      }
+      ''
+        mkdir -p $out/bin
+        makeWrapper ${pkgs.clickhouse}/bin/clickhouse-local $out/bin/clickhouse-local \
+          --append-flags "-C ${clickhouseConfigAWS}"
+      '';
 in
 
 depot.nix.readTree.drvTargets {
   inherit clickhouseLocalFixedAWS;
 
-  parse-bucket-logs = pkgs.runCommand "archivist-parse-bucket-logs"
-    {
-      nativeBuildInputs = [ pkgs.makeWrapper ];
-    } ''
-    mkdir -p $out/bin
-    makeWrapper ${(pkgs.writers.writeRust "parse-bucket-logs-unwrapped" {} ./parse_bucket_logs.rs)} $out/bin/archivist-parse-bucket-logs \
-      --prefix PATH : ${pkgs.lib.makeBinPath [ clickhouseLocalFixedAWS ]}
-  '';
+  parse-bucket-logs =
+    pkgs.runCommand "archivist-parse-bucket-logs"
+      {
+        nativeBuildInputs = [ pkgs.makeWrapper ];
+      }
+      ''
+        mkdir -p $out/bin
+        makeWrapper ${
+          (pkgs.writers.writeRust "parse-bucket-logs-unwrapped" { } ./parse_bucket_logs.rs)
+        } $out/bin/archivist-parse-bucket-logs \
+          --prefix PATH : ${pkgs.lib.makeBinPath [ clickhouseLocalFixedAWS ]}
+      '';
 
   # A shell, by default pointing us to the archivist SSO profile / account by default.
   shell = pkgs.mkShell {

File diff suppressed because it is too large.


@@ -1,4 +1,9 @@
-{ pkgs, depot, lib, ... }:
+{
+  pkgs,
+  depot,
+  lib,
+  ...
+}:
 
 (pkgs.callPackage ./Cargo.nix {
   defaultCrateOverrides = (depot.snix.utils.defaultCrateOverridesForPkgs pkgs) // {
@@ -10,6 +15,7 @@
       nativeBuildInputs = [ pkgs.protobuf ];
     };
   };
-}).rootCrate.build.overrideAttrs {
-  meta.ci.extraSteps.crate2nix-check = depot.snix.utils.mkCrate2nixCheck ./Cargo.nix;
-}
+}).rootCrate.build.overrideAttrs
+  {
+    meta.ci.extraSteps.crate2nix-check = depot.snix.utils.mkCrate2nixCheck ./Cargo.nix;
+  }

File diff suppressed because it is too large.


@@ -6,6 +6,7 @@
       src = depot.snix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
     };
   };
-}).rootCrate.build.overrideAttrs {
-  meta.ci.extraSteps.crate2nix = depot.snix.utils.mkCrate2nixCheck ./Cargo.nix;
-}
+}).rootCrate.build.overrideAttrs
+  {
+    meta.ci.extraSteps.crate2nix = depot.snix.utils.mkCrate2nixCheck ./Cargo.nix;
+  }

File diff suppressed because it is too large.


@@ -6,6 +6,7 @@
       src = depot.snix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
     };
   };
-}).rootCrate.build.overrideAttrs {
-  meta.ci.extraSteps.crate2nix = depot.snix.utils.mkCrate2nixCheck ./Cargo.nix;
-}
+}).rootCrate.build.overrideAttrs
+  {
+    meta.ci.extraSteps.crate2nix = depot.snix.utils.mkCrate2nixCheck ./Cargo.nix;
+  }

File diff suppressed because it is too large.


@@ -1,4 +1,9 @@
-{ pkgs, lib, depot, ... }:
+{
+  pkgs,
+  lib,
+  depot,
+  ...
+}:
 
 let
   pkgsCross = pkgs.pkgsCross.wasm32-unknown-none;
 in
@@ -8,25 +13,26 @@ in
       src = depot.snix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
     };
   };
-}).rootCrate.build.overrideAttrs (oldAttrs: {
-  installPhase = ''
-    ${lib.getExe pkgs.wasm-bindgen-cli} \
-      --target web \
-      --out-dir $out \
-      --out-name ${oldAttrs.crateName} \
-      --no-typescript \
-      target/lib/${oldAttrs.crateName}-${oldAttrs.metadata}.wasm
+}).rootCrate.build.overrideAttrs
+  (oldAttrs: {
+    installPhase = ''
+      ${lib.getExe pkgs.wasm-bindgen-cli} \
+        --target web \
+        --out-dir $out \
+        --out-name ${oldAttrs.crateName} \
+        --no-typescript \
+        target/lib/${oldAttrs.crateName}-${oldAttrs.metadata}.wasm
 
       mv src/*.{html,css} $out
     '';
 
     passthru.serve = pkgs.writeShellScriptBin "snixbolt-serve" ''
       ${lib.getExe pkgs.simple-http-server} \
         --index \
         --nocache \
         "$@" \
         ${depot.contrib.snixbolt}
     '';
 
     meta.ci.extraSteps.crate2nix-check = depot.snix.utils.mkCrate2nixCheck ./Cargo.nix;
   })

File diff suppressed because it is too large.


@@ -6,6 +6,7 @@
       src = depot.snix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
     };
   };
-}).rootCrate.build.overrideAttrs {
-  meta.ci.extraSteps.crate2nix-check = depot.snix.utils.mkCrate2nixCheck ./Cargo.nix;
-}
+}).rootCrate.build.overrideAttrs
+  {
+    meta.ci.extraSteps.crate2nix-check = depot.snix.utils.mkCrate2nixCheck ./Cargo.nix;
+  }

File diff suppressed because it is too large.


@@ -6,6 +6,7 @@
       src = depot.snix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
     };
   };
-}).rootCrate.build.overrideAttrs {
-  meta.ci.extraSteps.crate2nix-check = depot.snix.utils.mkCrate2nixCheck ./Cargo.nix;
-}
+}).rootCrate.build.overrideAttrs
+  {
+    meta.ci.extraSteps.crate2nix-check = depot.snix.utils.mkCrate2nixCheck ./Cargo.nix;
+  }


@@ -2,28 +2,31 @@
 # (see //nix/readTree for details) and constructing a matching attribute set
 # tree.
-{ nixpkgsBisectPath ? null
-, parentTargetMap ? null
-, nixpkgsConfig ? { }
-, localSystem ? builtins.currentSystem
-, crossSystem ? null
-, ...
+{
+  nixpkgsBisectPath ? null,
+  parentTargetMap ? null,
+  nixpkgsConfig ? { },
+  localSystem ? builtins.currentSystem,
+  crossSystem ? null,
+  ...
 }@args:
 
 let
   readTree = import ./nix/readTree { };
 
-  readDepot = depotArgs: readTree {
-    args = depotArgs;
-    path = ./.;
-    scopedArgs = {
-      # FIXME(Lix): this cannot work in Lix itself.
-      # __findFile = _: _: throw "Do not import from NIX_PATH in the depot!";
-      builtins = builtins // {
-        currentSystem = throw "Use localSystem from the readTree args instead of builtins.currentSystem!";
-      };
-    };
-  };
+  readDepot =
+    depotArgs:
+    readTree {
+      args = depotArgs;
+      path = ./.;
+      scopedArgs = {
+        # FIXME(Lix): this cannot work in Lix itself.
+        # __findFile = _: _: throw "Do not import from NIX_PATH in the depot!";
+        builtins = builtins // {
+          currentSystem = throw "Use localSystem from the readTree args instead of builtins.currentSystem!";
+        };
+      };
+    };
 
   # To determine build targets, we walk through the depot tree and
   # fetch attributes that were imported by readTree and are buildable.
@@ -34,54 +37,66 @@ let
   eligible = node: (node ? outPath) && !(node.meta.ci.skip or (node.meta.broken or false));
 in
 
-readTree.fix (self: (readDepot {
-  inherit localSystem crossSystem;
-  depot = self;
+readTree.fix (
+  self:
+  (readDepot {
+    inherit localSystem crossSystem;
+    depot = self;
 
     # Pass third_party as 'pkgs' (for compatibility with external
     # imports for certain subdirectories)
     pkgs = self.third_party.nixpkgs;
 
     # Expose lib attribute to packages.
     lib = self.third_party.nixpkgs.lib;
 
     # Pass arguments passed to the entire depot through, for packages
     # that would like to add functionality based on this.
     #
     # Note that it is intended for exceptional circumstance, such as
     # debugging by bisecting nixpkgs.
     externalArgs = args;
-}) // {
-  # Make the path to the depot available for things that might need it
-  # (e.g. NixOS module inclusions)
-  path = self.third_party.nixpkgs.lib.cleanSourceWith {
-    name = "depot";
-    src = ./.;
-    filter = self.third_party.nixpkgs.lib.cleanSourceFilter;
-  };
+  })
+  // {
+    # Make the path to the depot available for things that might need it
+    # (e.g. NixOS module inclusions)
+    path = self.third_party.nixpkgs.lib.cleanSourceWith {
+      name = "depot";
+      src = ./.;
+      filter = self.third_party.nixpkgs.lib.cleanSourceFilter;
+    };
 
     # Additionally targets can be excluded from CI by adding them to the
     # list below.
     ci.excluded = [
     ];
 
     # List of all buildable targets, for CI purposes.
     #
     # Note: To prevent infinite recursion, this *must* be a nested
     # attribute set (which does not have a __readTree attribute).
-    ci.targets = readTree.gather
-      (t: (eligible t) && (!builtins.elem t self.ci.excluded))
-      (self // {
+    ci.targets = readTree.gather (t: (eligible t) && (!builtins.elem t self.ci.excluded)) (
+      self
+      // {
        # remove the pipelines themselves from the set over which to
        # generate pipelines because that also leads to infinite
        # recursion.
-        ops = self.ops // { pipelines = null; };
-      });
+        ops = self.ops // {
+          pipelines = null;
+        };
+      }
+    );
 
     # Derivation that gcroots all depot targets.
-    ci.gcroot = with self.third_party.nixpkgs; writeText "depot-gcroot"
-      (builtins.concatStringsSep "\n"
-        (lib.flatten
-          (map (p: map (o: p.${o}) p.outputs or [ ]) # list all outputs of each drv
-            self.ci.targets)));
-})
+    ci.gcroot =
+      with self.third_party.nixpkgs;
+      writeText "depot-gcroot" (
+        builtins.concatStringsSep "\n" (
+          lib.flatten (
+            map (p: map (o: p.${o}) p.outputs or [ ]) # list all outputs of each drv
+              self.ci.targets
+          )
+        )
+      );
+  }
+)


@@ -1,10 +1,12 @@
 { pkgs, ... }:
 
 let
-  mkWebroot = title: imgsrc: pkgs.runCommand "webroot" { } ''
-    mkdir -p $out
-    title="${title}" substituteAll ${./index.html} $out/index.html
-    cp ${imgsrc} $out/solves-this.png
-  '';
+  mkWebroot =
+    title: imgsrc:
+    pkgs.runCommand "webroot" { } ''
+      mkdir -p $out
+      title="${title}" substituteAll ${./index.html} $out/index.html
+      cp ${imgsrc} $out/solves-this.png
+    '';
 in
 {


@@ -1,9 +1,14 @@
 # Check protobuf breaking. Lints already happen in individual targets.
 #
-{ depot, pkgs, lib, ... }:
+{
+  depot,
+  pkgs,
+  lib,
+  ...
+}:
 
 let
-  inherit (depot.nix) bufCheck;# self reference
+  inherit (depot.nix) bufCheck; # self reference
   script = pkgs.writeShellScriptBin "ci-buf-check" ''
     export PATH="$PATH:${pkgs.lib.makeBinPath [ pkgs.buf ]}"


@@ -1,7 +1,6 @@
 { makeSetupHook }:
 
-makeSetupHook
-  {
+makeSetupHook {
   name = "rules_java_bazel_hook";
   substitutions = {
     local_java = ./local_java;


@@ -1,13 +1,14 @@
-{ stdenvNoCC
-, lib
-, makeSetupHook
-, fetchFromGitHub
-, coreutils
-, gnugrep
-, nodejs
-, yarn
-, git
-, cacert
+{
+  stdenvNoCC,
+  lib,
+  makeSetupHook,
+  fetchFromGitHub,
+  coreutils,
+  gnugrep,
+  nodejs,
+  yarn,
+  git,
+  cacert,
 }:
 let
   rulesNodeJS = stdenvNoCC.mkDerivation rec {
@@ -30,7 +31,12 @@ let
       --replace-quiet '#!/usr/bin/env bash' '#!${stdenvNoCC.shell}' \
       --replace-quiet '#!/bin/bash' '#!${stdenvNoCC.shell}'
     done
-    sed -i '/^#!/a export PATH=${lib.makeBinPath [ coreutils gnugrep ]}:$PATH' internal/node/launcher.sh
+    sed -i '/^#!/a export PATH=${
+      lib.makeBinPath [
+        coreutils
+        gnugrep
+      ]
+    }:$PATH' internal/node/launcher.sh
   '';
 
   installPhase = ''
@@ -38,8 +44,7 @@ let
     '';
   };
 in
-makeSetupHook
-  {
+makeSetupHook {
   name = "bazelbuild-rules_nodejs-5-hook";
   propagatedBuildInputs = [
     nodejs
@@ -48,7 +53,12 @@ makeSetupHook
     cacert
   ];
   substitutions = {
-    inherit nodejs yarn cacert rulesNodeJS;
+    inherit
+      nodejs
+      yarn
+      cacert
+      rulesNodeJS
+      ;
     local_node = ./local_node;
     local_yarn = ./local_yarn;
   };


@@ -1,17 +1,19 @@
-{ stdenv
-, lib
-, pkgs
-, coreutils
+{
+  stdenv,
+  lib,
+  pkgs,
+  coreutils,
 }:
-{ name ? "${baseAttrs.pname}-${baseAttrs.version}"
-, bazelTargets
-, bazel ? pkgs.bazel
-, depsHash
-, extraCacheInstall ? ""
-, extraBuildSetup ? ""
-, extraBuildInstall ? ""
-, ...
+{
+  name ? "${baseAttrs.pname}-${baseAttrs.version}",
+  bazelTargets,
+  bazel ? pkgs.bazel,
+  depsHash,
+  extraCacheInstall ? "",
+  extraBuildSetup ? "",
+  extraBuildInstall ? "",
+  ...
 }@baseAttrs:
 
 let
@@ -24,20 +26,23 @@ let
   ];
   attrs = cleanAttrs baseAttrs;
 
-  base = stdenv.mkDerivation (attrs // {
-    nativeBuildInputs = (attrs.nativeBuildInputs or [ ]) ++ [
-      bazel
-    ];
+  base = stdenv.mkDerivation (
+    attrs
+    // {
+      nativeBuildInputs = (attrs.nativeBuildInputs or [ ]) ++ [
+        bazel
+      ];
 
-    preUnpack = ''
-      if [[ ! -d $HOME ]]; then
-        export HOME=$NIX_BUILD_TOP/home
-        mkdir -p $HOME
-      fi
-    '';
+      preUnpack = ''
+        if [[ ! -d $HOME ]]; then
+          export HOME=$NIX_BUILD_TOP/home
+          mkdir -p $HOME
+        fi
+      '';
 
-    bazelTargetNames = builtins.attrNames bazelTargets;
-  });
+      bazelTargetNames = builtins.attrNames bazelTargets;
+    }
+  );
 
   cache = base.overrideAttrs (base: {
     name = "${name}-deps";
@@ -89,18 +94,23 @@ let
   installPhase = ''
     runHook preInstall
 
-    ${builtins.concatStringsSep "\n" (lib.mapAttrsToList (target: outPath: lib.optionalString (outPath != null) ''
-      TARGET_OUTPUTS="$(bazel cquery --repository_cache=$cache/repository-cache $bazelFlags "''${bazelFlagsArray[@]}" --output=files "${target}")"
-      if [[ "$(echo "$TARGET_OUTPUTS" | wc -l)" -gt 1 ]]; then
-        echo "Installing ${target}'s outputs ($TARGET_OUTPUTS) into ${outPath} as a directory"
-        mkdir -p "${outPath}"
-        cp $TARGET_OUTPUTS "${outPath}"
-      else
-        echo "Installing ${target}'s output ($TARGET_OUTPUTS) to ${outPath}"
-        mkdir -p "${dirOf outPath}"
-        cp "$TARGET_OUTPUTS" "${outPath}"
-      fi
-    '') bazelTargets)}
+    ${builtins.concatStringsSep "\n" (
+      lib.mapAttrsToList (
+        target: outPath:
+        lib.optionalString (outPath != null) ''
+          TARGET_OUTPUTS="$(bazel cquery --repository_cache=$cache/repository-cache $bazelFlags "''${bazelFlagsArray[@]}" --output=files "${target}")"
+          if [[ "$(echo "$TARGET_OUTPUTS" | wc -l)" -gt 1 ]]; then
+            echo "Installing ${target}'s outputs ($TARGET_OUTPUTS) into ${outPath} as a directory"
+            mkdir -p "${outPath}"
+            cp $TARGET_OUTPUTS "${outPath}"
+          else
+            echo "Installing ${target}'s output ($TARGET_OUTPUTS) to ${outPath}"
+            mkdir -p "${dirOf outPath}"
+            cp "$TARGET_OUTPUTS" "${outPath}"
+          fi
+        ''
+      ) bazelTargets
+    )}
     ${extraBuildInstall}
 
     runHook postInstall


@@ -1,6 +1,7 @@
 { pkgs, ... }:
 
-(pkgs.callPackage ./buildBazelPackageNG.nix { }) // {
+(pkgs.callPackage ./buildBazelPackageNG.nix { })
+// {
   bazelRulesJavaHook = pkgs.callPackage ./bazelRulesJavaHook { };
   bazelRulesNodeJS5Hook = pkgs.callPackage ./bazelRulesNodeJS5Hook { };
 }


@@ -22,7 +22,8 @@ let
     listToAttrs
     mapAttrs
     toJSON
-    unsafeDiscardStringContext;
+    unsafeDiscardStringContext
+    ;
 
   inherit (pkgs) lib runCommand writeText;
   inherit (depot.nix.readTree) mkLabel;
@@ -33,24 +34,27 @@ rec {
   # Create a unique key for the buildkite pipeline based on the given derivation
   # or drvPath. A consequence of using such keys is that every derivation may
   # only be exposed as a single, unique step in the pipeline.
-  keyForDrv = drvOrPath:
+  keyForDrv =
+    drvOrPath:
     let
       drvPath =
-        if lib.isDerivation drvOrPath then drvOrPath.drvPath
-        else if lib.isString drvOrPath then drvOrPath
-        else builtins.throw "keyForDrv: expected string or derivation";
+        if lib.isDerivation drvOrPath then
+          drvOrPath.drvPath
+        else if lib.isString drvOrPath then
+          drvOrPath
+        else
+          builtins.throw "keyForDrv: expected string or derivation";
 
       # Only use the drv hash to prevent escaping problems. Buildkite also has a
       # limit of 100 characters on keys.
     in
-    "drv-" + (builtins.substring 0 32
-      (builtins.baseNameOf (unsafeDiscardStringContext drvPath))
-    );
+    "drv-" + (builtins.substring 0 32 (builtins.baseNameOf (unsafeDiscardStringContext drvPath)));
 
   # Given an arbitrary attribute path generate a Nix expression which obtains
   # this from the root of depot (assumed to be ./.). Attributes may be any
   # Nix strings suitable as attribute names, not just Nix literal-safe strings.
-  mkBuildExpr = attrPath:
+  mkBuildExpr =
+    attrPath:
     let
       descend = expr: attr: "builtins.getAttr \"${attr}\" (${expr})";
     in
@@ -58,38 +62,49 @@
   # Determine whether to skip a target if it has not diverged from the
   # HEAD branch.
-  shouldSkip = { parentTargetMap ? { }, label, drvPath }:
-    if (hasAttr label parentTargetMap) && parentTargetMap."${label}".drvPath == drvPath
-    then "Target has not changed."
-    else false;
+  shouldSkip =
+    {
+      parentTargetMap ? { },
+      label,
+      drvPath,
+    }:
+    if (hasAttr label parentTargetMap) && parentTargetMap."${label}".drvPath == drvPath then
+      "Target has not changed."
+    else
+      false;
 
   # Create build command for an attribute path pointing to a derivation.
-  mkBuildCommand = { attrPath, drvPath, outLink ? "result" }: concatStringsSep " " [
-    # If the nix build fails, the Nix command's exit status should be used.
-    "set -o pipefail;"
+  mkBuildCommand =
+    {
+      attrPath,
+      drvPath,
+      outLink ? "result",
+    }:
+    concatStringsSep " " [
+      # If the nix build fails, the Nix command's exit status should be used.
+      "set -o pipefail;"
 
       # First try to realise the drvPath of the target so we don't evaluate twice.
      # Nix has no concept of depending on a derivation file without depending on
      # at least one of its `outPath`s, so we need to discard the string context
      # if we don't want to build everything during pipeline construction.
      #
      # To make this more uniform with how nix-build(1) works, we call realpath(1)
      # on nix-store(1)'s output since it has the habit of printing the path of the
      # out link, not the store path.
      "(nix-store --realise '${drvPath}' --add-root '${outLink}' --indirect | xargs -r realpath)"
 
      # Since we don't gcroot the derivation files, they may be deleted by the
      # garbage collector. In that case we can reevaluate and build the attribute
      # using nix-build.
      "|| (test ! -f '${drvPath}' && nix-build -E '${mkBuildExpr attrPath}' --show-trace --out-link '${outLink}')"
    ];
 
   # Attribute path of a target relative to the depot root. Needs to take into
   # account whether the target is a physical target (which corresponds to a path
   # in the filesystem) or the subtarget of a physical target.
-  targetAttrPath = target:
-    target.__readTree
-    ++ lib.optionals (target ? __subtarget) [ target.__subtarget ];
+  targetAttrPath =
+    target: target.__readTree ++ lib.optionals (target ? __subtarget) [ target.__subtarget ];
 
   # Given a derivation (identified by drvPath) that is part of the list of
   # targets passed to mkPipeline, determine all derivations that it depends on
@@ -97,11 +112,18 @@
   # that build them. This is used to populate `depends_on` in `mkStep`.
   #
   # See //nix/dependency-analyzer for documentation on the structure of `targetDepMap`.
-  getTargetPipelineDeps = targetDepMap: drvPath:
-    builtins.map keyForDrv (targetDepMap.${drvPath}.knownDeps or [ ]);
+  getTargetPipelineDeps =
+    targetDepMap: drvPath: builtins.map keyForDrv (targetDepMap.${drvPath}.knownDeps or [ ]);
 
   # Create a pipeline step from a single target.
-  mkStep = { headBranch, parentTargetMap, targetDepMap, target, cancelOnBuildFailing }:
+  mkStep =
+    {
+      headBranch,
+      parentTargetMap,
+      targetDepMap,
+      target,
+      cancelOnBuildFailing,
+    }:
     let
       label = mkLabel target;
       drvPath = unsafeDiscardStringContext target.drvPath;
@@ -120,24 +142,34 @@
      # Add a dependency on the initial static pipeline step which
      # always runs. This allows build steps uploaded in batches to
      # start running before all batches have been uploaded.
-      depends_on = [ ":init:" ]
+      depends_on = [
+        ":init:"
+      ]
      ++ getTargetPipelineDeps targetDepMap drvPath
      ++ lib.optionals (target ? meta.ci.buildkiteExtraDeps) target.meta.ci.buildkiteExtraDeps;
-    } // lib.optionalAttrs (target ? meta.timeout) {
+    }
+    // lib.optionalAttrs (target ? meta.timeout) {
      timeout_in_minutes = target.meta.timeout / 60;
 
      # Additional arguments to set on the step.
      # Keep in mind these *overwrite* existing step args, not extend. Use with caution.
-    } // lib.optionalAttrs (target ? meta.ci.buildkiteExtraStepArgs) target.meta.ci.buildkiteExtraStepArgs;
+    }
+    // lib.optionalAttrs (
+      target ? meta.ci.buildkiteExtraStepArgs
+    ) target.meta.ci.buildkiteExtraStepArgs;
 
   # Helper function to inelegantly divide a list into chunks of at
   # most n elements.
   #
   # This works by assigning each element a chunk ID based on its
   # index, and then grouping all elements by their chunk ID.
-  chunksOf = n: list:
+  chunksOf =
+    n: list:
     let
       chunkId = idx: toString (idx / n + 1);
-      assigned = lib.imap1 (idx: value: { inherit value; chunk = chunkId idx; }) list;
+      assigned = lib.imap1 (idx: value: {
+        inherit value;
+        chunk = chunkId idx;
+      }) list;
       unchunk = mapAttrs (_: elements: map (e: e.value) elements);
     in
     unchunk (lib.groupBy (e: e.chunk) assigned);
@@ -156,36 +188,35 @@
   # are uploaded sequentially. This is because of a limitation in the
   # Buildkite backend which struggles to process more than a specific
   # number of chunks at once.
-  pipelineChunks = name: steps:
-    attrValues (mapAttrs (makePipelineChunk name) (chunksOf 192 steps));
+  pipelineChunks = name: steps: attrValues (mapAttrs (makePipelineChunk name) (chunksOf 192 steps));
 
   # Create a pipeline structure for the given targets.
   mkPipeline =
     {
      # HEAD branch of the repository on which release steps, GC
      # anchoring and other "mainline only" steps should run.
-      headBranch
-    , # List of derivations as read by readTree (in most cases just the
+      headBranch,
+      # List of derivations as read by readTree (in most cases just the
      # output of readTree.gather) that should be built in Buildkite.
      #
      # These are scheduled as the first build steps and run as fast as
      # possible, in order, without any concurrency restrictions.
-      drvTargets
-    , # Derivation map of a parent commit. Only targets which no longer
+      drvTargets,
+      # Derivation map of a parent commit. Only targets which no longer
      # correspond to the content of this map will be built. Passing an
      # empty map will always build all targets.
-      parentTargetMap ? { }
-    , # A list of plain Buildkite step structures to run alongside the
+      parentTargetMap ? { },
+      # A list of plain Buildkite step structures to run alongside the
      # build for all drvTargets, but before proceeding with any
      # post-build actions such as status reporting.
      #
      # Can be used for things like code formatting checks.
-      additionalSteps ? [ ]
-    , # A list of plain Buildkite step structures to run after all
+      additionalSteps ? [ ],
+      # A list of plain Buildkite step structures to run after all
      # previous steps succeeded.
      #
      # Can be used for status reporting steps and the like.
-      postBuildSteps ? [ ]
+      postBuildSteps ? [ ],
      # The list of phases known by the current Buildkite
      # pipeline. Dynamic pipeline chunks for each phase are uploaded
      # to Buildkite on execution of static part of the
@@ -199,7 +230,10 @@
      # - "build" - main phase for building all Nix targets
      # - "release" - pushing artifacts to external repositories
      # - "deploy" - updating external deployment configurations
-    , phases ? [ "build" "release" ]
+      phases ? [
+        "build"
+        "release"
+      ],
      # Build phases that are active for this invocation (i.e. their
      # steps should be generated).
      #
@@ -208,13 +242,13 @@
      # eval contexts.
      #
      # TODO(tazjin): Fail/warn if unknown phase is requested.
-    , activePhases ? phases
+      activePhases ? phases,
      # Setting this attribute to true cancels dynamic pipeline steps
      # as soon as the build is marked as failing.
      #
      # To enable this feature one should enable "Fail Fast" setting
      # at Buildkite pipeline or on organization level.
-    , cancelOnBuildFailing ? false
+      cancelOnBuildFailing ? false,
    }:
    let
      # List of phases to include.
@@ -232,20 +266,25 @@
      # the previous pipeline (per parentTargetMap). Unchanged targets will
      # be skipped (assumed already built), so it's useless to emit deps
      # on their steps.
-      changedDrvTargets = builtins.filter
-        (target:
-          parentTargetMap.${mkLabel target}.drvPath or null != target.drvPath
-        )
-        drvTargets;
+      changedDrvTargets = builtins.filter (
+        target: parentTargetMap.${mkLabel target}.drvPath or null != target.drvPath
+      ) drvTargets;
    in
    dependency-analyzer (dependency-analyzer.drvsToPaths changedDrvTargets);
 
   # Convert a target into all of its steps, separated by build
   # phase (as phases end up in different chunks).
-  targetToSteps = target:
+  targetToSteps =
+    target:
    let
      mkStepArgs = {
-        inherit headBranch parentTargetMap targetDepMap target cancelOnBuildFailing;
+        inherit
+          headBranch
+          parentTargetMap
+          targetDepMap
+          target
+          cancelOnBuildFailing
+          ;
      };
      step = mkStep mkStepArgs;
@@ -257,19 +296,21 @@
      overridable = f: mkStep (mkStepArgs // { target = (f target); });
 
      # Split extra steps by phase.
-      splitExtraSteps = lib.groupBy ({ phase, ... }: phase)
-        (attrValues (mapAttrs (normaliseExtraStep phases overridable)
-          (target.meta.ci.extraSteps or { })));
+      splitExtraSteps = lib.groupBy ({ phase, ... }: phase) (
+        attrValues (mapAttrs (normaliseExtraStep phases overridable) (target.meta.ci.extraSteps or { }))
+      );
 
-      extraSteps = mapAttrs
-        (_: steps:
-          map (mkExtraStep (targetAttrPath target) buildEnabled) steps)
-        splitExtraSteps;
+      extraSteps = mapAttrs (
+        _: steps: map (mkExtraStep (targetAttrPath target) buildEnabled) steps
+      ) splitExtraSteps;
    in
-    if !buildEnabled then extraSteps
-    else extraSteps // {
-      build = [ step ] ++ (extraSteps.build or [ ]);
-    };
+    if !buildEnabled then
+      extraSteps
+    else
+      extraSteps
+      // {
+        build = [ step ] ++ (extraSteps.build or [ ]);
+      };
 
   # Combine all target steps into step lists per phase.
   #
@@ -279,44 +320,47 @@
      release = postBuildSteps;
    };
 
-      phasesWithSteps = lib.zipAttrsWithNames enabledPhases (_: concatLists)
-        ((map targetToSteps drvTargets) ++ [ globalSteps ]);
+      phasesWithSteps = lib.zipAttrsWithNames enabledPhases (_: concatLists) (
+        (map targetToSteps drvTargets) ++ [ globalSteps ]
+      );
 
      # Generate pipeline chunks for each phase.
-      chunks = foldl'
-        (acc: phase:
-          let phaseSteps = phasesWithSteps.${phase} or [ ]; in
-          if phaseSteps == [ ]
-          then acc
-          else acc ++ (pipelineChunks phase phaseSteps))
-        [ ]
-        enabledPhases;
+      chunks = foldl' (
+        acc: phase:
+        let
+          phaseSteps = phasesWithSteps.${phase} or [ ];
+        in
+        if phaseSteps == [ ] then acc else acc ++ (pipelineChunks phase phaseSteps)
+      ) [ ] enabledPhases;
    in
    runCommand "buildkite-pipeline" { } ''
      mkdir $out
      echo "Generated ${toString (length chunks)} pipeline chunks"
-      ${
-        lib.concatMapStringsSep "\n"
-          (chunk: "cp ${chunk.path} $out/${chunk.filename}") chunks
-      }
+      ${lib.concatMapStringsSep "\n" (chunk: "cp ${chunk.path} $out/${chunk.filename}") chunks}
    '';
 
   # Create a drvmap structure for the given targets, containing the
   # mapping of all target paths to their derivations. The mapping can
   # be persisted for future use.
-  mkDrvmap = drvTargets: writeText "drvmap.json" (toJSON (listToAttrs (map
-    (target: {
-      name = mkLabel target;
-      value = {
-        drvPath = unsafeDiscardStringContext target.drvPath;
+  mkDrvmap =
+    drvTargets:
+    writeText "drvmap.json" (
+      toJSON (
+        listToAttrs (
+          map (target: {
+            name = mkLabel target;
+            value = {
+              drvPath = unsafeDiscardStringContext target.drvPath;
 
              # Include the attrPath in the output to reconstruct the drv
              # without parsing the human-readable label.
              attrPath = targetAttrPath target;
            };
-    })
-    drvTargets)));
+          }) drvTargets
+        )
+      )
+    );
 
   # Implementation of extra step logic.
   #
@@ -356,40 +400,49 @@
   # Create a gated step in a step group, independent from any other
   # steps.
-  mkGatedStep = { step, label, parent, prompt }: {
-    inherit (step) depends_on;
-    group = label;
-    skip = parent.skip or false;
+  mkGatedStep =
+    {
+      step,
+      label,
+      parent,
+      prompt,
+    }:
+    {
+      inherit (step) depends_on;
+      group = label;
+      skip = parent.skip or false;
 
      steps = [
        {
          inherit prompt;
          branches = step.branches or [ ];
          block = ":radio_button: Run ${label}? (from ${parent.env.READTREE_TARGET})";
        }
 
        # The explicit depends_on of the wrapped step must be removed,
        # otherwise its dependency relationship with the gate step will
        # break.
        (builtins.removeAttrs step [ "depends_on" ])
      ];
    };
 
   # Validate and normalise extra step configuration before actually
   # generating build steps, in order to use user-provided metadata
   # during the pipeline generation.
-  normaliseExtraStep = phases: overridableParent: key:
-    { command
-    , label ? key
-    , needsOutput ? false
-    , parentOverride ? (x: x)
-    , branches ? null
-    , alwaysRun ? false
-    , prompt ? false
-    , softFail ? false
-    , phase ? "build"
-    , skip ? false
-    , agents ? null
+  normaliseExtraStep =
+    phases: overridableParent: key:
+    {
+      command,
+      label ? key,
+      needsOutput ? false,
+      parentOverride ? (x: x),
+      branches ? null,
+      alwaysRun ? false,
+      prompt ? false,
+      softFail ? false,
+      phase ? "build",
+      skip ? false,
+      agents ? null,
    }:
    let
      parent = overridableParent parentOverride;
@@ -401,8 +454,7 @@
        Phase '${phase}' is not valid.
 
        Known phases: ${concatStringsSep ", " phases}
-      ''
-        phase;
+      '' phase;
    in
    {
      inherit
@@ -416,7 +468,8 @@
        parentLabel
        softFail
        skip
-        agents;
+        agents
+        ;
 
      phase = validPhase;
@@ -426,13 +479,13 @@
        The 'prompt' feature can not be used by steps in the "build"
        phase, because CI builds should not be gated on manual human
        approvals.
-        ''
-          prompt;
+        '' prompt;
    };
 
   # Create the Buildkite configuration for an extra step, optionally
   # wrapping it in a gate group.
-  mkExtraStep = parentAttrPath: buildEnabled: cfg:
+  mkExtraStep =
+    parentAttrPath: buildEnabled: cfg:
    let
      # ATTN: needs to match an entry in .gitignore so that the tree won't get dirty
      commandScriptLink = "nix-buildkite-extra-step-command-script";
@@ -453,15 +506,11 @@
      in
      if cfg.alwaysRun then false else skip';
 
-      depends_on = lib.optional
-        (buildEnabled && !cfg.alwaysRun && !cfg.needsOutput)
-        cfg.parent.key;
+      depends_on = lib.optional (buildEnabled && !cfg.alwaysRun && !cfg.needsOutput) cfg.parent.key;
 
      command = ''
        set -ueo pipefail
-        ${lib.optionalString cfg.needsOutput
-          "echo '~~~ Preparing build output of ${cfg.parentLabel}'"
-        }
+        ${lib.optionalString cfg.needsOutput "echo '~~~ Preparing build output of ${cfg.parentLabel}'"}
        ${lib.optionalString cfg.needsOutput cfg.parent.command}
        echo '--- Building extra step script'
        command_script="$(${
@@ -469,9 +518,13 @@
          assert builtins.length cfg.command.outputs == 1;
          mkBuildCommand {
            # script is exposed at <parent>.meta.ci.extraSteps.<key>.command
-            attrPath =
-              parentAttrPath
-              ++ [ "meta" "ci" "extraSteps" cfg.key "command" ];
+            attrPath = parentAttrPath ++ [
+              "meta"
+              "ci"
+              "extraSteps"
+              cfg.key
+              "command"
+            ];
            drvPath = unsafeDiscardStringContext cfg.command.drvPath;
            # make sure it doesn't conflict with result (from needsOutput)
            outLink = commandScriptLink;
@@ -483,17 +536,17 @@
      '';
 
      soft_fail = cfg.softFail;
-    } // (lib.optionalAttrs (cfg.agents != null) { inherit (cfg) agents; })
+    }
+    // (lib.optionalAttrs (cfg.agents != null) { inherit (cfg) agents; })
    // (lib.optionalAttrs (cfg.branches != null) {
      branches = lib.concatStringsSep " " cfg.branches;
    });
    in
-    if (isString cfg.prompt)
-    then
-      mkGatedStep
-        {
-          inherit step;
-          inherit (cfg) label parent prompt;
-        }
-    else step;
+    if (isString cfg.prompt) then
+      mkGatedStep {
+        inherit step;
+        inherit (cfg) label parent prompt;
+      }
+    else
+      step;
 }


@@ -1,4 +1,9 @@
-{ lib, depot, pkgs, ... }:
+{
+  lib,
+  depot,
+  pkgs,
+  ...
+}:
 
 let
   inherit (builtins) unsafeDiscardStringContext appendContext;
@@ -18,23 +23,22 @@ let
   directDrvDeps =
     let
       getDeps =
-        if lib.versionAtLeast builtins.nixVersion "2.6"
-        then
+        if lib.versionAtLeast builtins.nixVersion "2.6" then
          # Since https://github.com/NixOS/nix/pull/1643, Nix apparently »preserves
          # string context« through a readFile invocation. This has the side effect
          # that it becomes possible to query the actual references a store path has.
          # Not a 100% sure this is intended, but _very_ convenient for us here.
-          drvPath:
-          builtins.attrNames (builtins.getContext (builtins.readFile drvPath))
+          drvPath: builtins.attrNames (builtins.getContext (builtins.readFile drvPath))
        else
          # For Nix < 2.6 we have to rely on HACK, namely grepping for quoted
          # store path references in the file. In the future this should be
          # replaced by a proper derivation parser.
-          drvPath: builtins.concatLists (
+          drvPath:
+          builtins.concatLists (
            builtins.filter builtins.isList (
-              builtins.split
-                "\"(${lib.escapeRegex builtins.storeDir}/[[:alnum:]+._?=-]+.drv)\""
-                (builtins.readFile drvPath)
+              builtins.split "\"(${lib.escapeRegex builtins.storeDir}/[[:alnum:]+._?=-]+.drv)\"" (
+                builtins.readFile drvPath
+              )
            )
          );
    in
@@ -42,15 +46,12 @@ let
    # if the passed path is not a derivation we can't necessarily get its
    # dependencies, since it may not be representable as a Nix string due to
    # NUL bytes, e.g. compressed patch files imported into the Nix store.
-    if builtins.match "^.+\\.drv$" drvPath == null
-    then [ ]
-    else getDeps drvPath;
+    if builtins.match "^.+\\.drv$" drvPath == null then [ ] else getDeps drvPath;
 
   # Maps a list of derivation to the list of corresponding `drvPath`s.
   #
   # Type: [drv] -> [str]
-  drvsToPaths = drvs:
-    builtins.map (drv: builtins.unsafeDiscardOutputDependency drv.drvPath) drvs;
+  drvsToPaths = drvs: builtins.map (drv: builtins.unsafeDiscardOutputDependency drv.drvPath) drvs;
 
   #
   # Calculate map of direct derivation dependencies
@@ -62,7 +63,8 @@ let
   # generating the map from
   #
   # Type: bool -> string -> set
-  drvEntry = known: drvPath:
+  drvEntry =
+    known: drvPath:
    let
      # key may not refer to a store path, …
      key = unsafeDiscardStringContext drvPath;
@@ -85,7 +87,8 @@ let
   # attribute to `true` if it is in the list of input derivation paths.
   #
   # Type: [str] -> set
-  plainDrvDepMap = drvPaths:
+  plainDrvDepMap =
+    drvPaths:
    builtins.listToAttrs (
      builtins.genericClosure {
        startSet = builtins.map (drvEntry true) drvPaths;
@@ -121,13 +124,15 @@ let
   # `fmap (builtins.getAttr "knownDeps") (getAttr drvPath)` will always succeed.
   #
   # Type: str -> stateMonad drvDepMap null
-  insertKnownDeps = drvPathWithContext:
+  insertKnownDeps =
+    drvPathWithContext:
    let
      # We no longer need to read from the store, so context is irrelevant, but
      # we need to check for attr names which requires the absence of context.
      drvPath = unsafeDiscardStringContext drvPathWithContext;
    in
-    bind get (initDepMap:
+    bind get (
+      initDepMap:
      # Get the dependency map's state before we've done anything to obtain the
      # entry we'll be manipulating later as well as its dependencies.
      let
@@ -135,57 +140,48 @@ let
      # We don't need to recurse if our direct dependencies either have their
      # knownDeps list already populated or are known dependencies themselves.
-      depsPrecalculated =
-        builtins.partition
-          (dep:
-            initDepMap.${dep}.known
-            || initDepMap.${dep} ? knownDeps
-          )
-          entryPoint.deps;
+      depsPrecalculated = builtins.partition (
+        dep: initDepMap.${dep}.known || initDepMap.${dep} ? knownDeps
+      ) entryPoint.deps;
 
      # If a direct dependency is known, it goes right to our known dependency
      # list. If it is unknown, we can copy its knownDeps list into our own.
-      initiallyKnownDeps =
-        builtins.concatLists (
-          builtins.map
-            (dep:
-              if initDepMap.${dep}.known
-              then [ dep ]
-              else initDepMap.${dep}.knownDeps
-            )
-            depsPrecalculated.right
-        );
+      initiallyKnownDeps = builtins.concatLists (
+        builtins.map (
+          dep: if initDepMap.${dep}.known then [ dep ] else initDepMap.${dep}.knownDeps
+        ) depsPrecalculated.right
+      );
    in
    # If the information was already calculated before, we can exit right away
-    if entryPoint ? knownDeps
-    then pure null
+    if entryPoint ? knownDeps then
+      pure null
    else
      after
        # For all unknown direct dependencies which don't have a `knownDeps`
        # list, we call ourselves recursively to populate it. Since this is
        # done sequentially in the state monad, we avoid recalculating the
        # list for the same derivation multiple times.
-        (for_
-          depsPrecalculated.wrong
-          insertKnownDeps)
+        (for_ depsPrecalculated.wrong insertKnownDeps)
        # After this we can obtain the updated dependency map which will have
        # a `knownDeps` list for all our direct dependencies and update the
        # entry for the input `drvPath`.
-        (bind
-          get
-          (populatedDepMap:
-            (setAttr drvPath (entryPoint // {
-              knownDeps =
-                lib.unique (
+        (
+          bind get (
+            populatedDepMap:
+            (setAttr drvPath (
+              entryPoint
+              // {
+                knownDeps = lib.unique (
                  initiallyKnownDeps
                  ++ builtins.concatLists (
-                    builtins.map
-                      (dep: populatedDepMap.${dep}.knownDeps)
-                      depsPrecalculated.wrong
+                    builtins.map (dep: populatedDepMap.${dep}.knownDeps) depsPrecalculated.wrong
                  )
                );
-            }))))
+              }
+            ))
+          )
+        )
    );
 
   # This function puts it all together and is exposed via `__functor`.
@@ -204,14 +200,8 @@ let
   # */
   # ];
   # }
-  knownDrvDepMap = knownDrvPaths:
-    run
-      (plainDrvDepMap knownDrvPaths)
-      (after
-        (for_
-          knownDrvPaths
-          insertKnownDeps)
-        get);
+  knownDrvDepMap =
+    knownDrvPaths: run (plainDrvDepMap knownDrvPaths) (after (for_ knownDrvPaths insertKnownDeps) get);
 
   #
   # Other things based on knownDrvDepMap
@@ -221,39 +211,39 @@ let
   # name, so multiple entries can be collapsed if they have the same name.
   #
   # Type: [drv] -> drv
-  knownDependencyGraph = name: drvs:
+  knownDependencyGraph =
+    name: drvs:
    let
-      justName = drvPath:
-        builtins.substring
-          (builtins.stringLength builtins.storeDir + 1 + 32 + 1)
-          (builtins.stringLength drvPath)
-          (unsafeDiscardStringContext drvPath);
+      justName =
+        drvPath:
+        builtins.substring (
+          builtins.stringLength builtins.storeDir + 1 + 32 + 1
+        ) (builtins.stringLength drvPath) (unsafeDiscardStringContext drvPath);
 
      gv = pkgs.writeText "${name}-dependency-analysis.gv" ''
        digraph depot {
        ${
-          (lib.concatStringsSep "\n"
-            (lib.mapAttrsToList (name: value:
-              if !value.known then ""
-              else lib.concatMapStringsSep "\n"
-                (knownDep: " \"${justName name}\" -> \"${justName knownDep}\"")
-                value.knownDeps
-            )
-            (depot.nix.dependency-analyzer (
-              drvsToPaths drvs
-            ))))
+          (lib.concatStringsSep "\n" (
+            lib.mapAttrsToList (
+              name: value:
+              if !value.known then
+                ""
+              else
+                lib.concatMapStringsSep "\n" (
+                  knownDep: " \"${justName name}\" -> \"${justName knownDep}\""
+                ) value.knownDeps
+            ) (depot.nix.dependency-analyzer (drvsToPaths drvs))
+          ))
        }
        }
      '';
    in
-    pkgs.runCommand "${name}-dependency-analysis.svg"
-      {
-        nativeBuildInputs = [
-          pkgs.buildPackages.graphviz
-        ];
-      }
-      "dot -Tsvg < ${gv} > $out";
+    pkgs.runCommand "${name}-dependency-analysis.svg" {
+      nativeBuildInputs = [
+        pkgs.buildPackages.graphviz
+      ];
+    } "dot -Tsvg < ${gv} > $out";
 in
 {


@@ -4,8 +4,8 @@ let
   # e.g.
   #   a"b\c -> "a\"b\\c"
   #   a\"bc -> "a\\\"bc"
-  escapeExeclineArg = arg:
-    ''"${builtins.replaceStrings [ ''"'' ''\'' ] [ ''\"'' ''\\'' ] (toString arg)}"'';
+  escapeExeclineArg =
+    arg: ''"${builtins.replaceStrings [ ''"'' ''\'' ] [ ''\"'' ''\\'' ] (toString arg)}"'';
 
   # Escapes an execline (list of execline strings) to be passed to execlineb
   # Give it a nested list of strings. Nested lists are interpolated as execline
@@ -15,15 +15,24 @@ let
   # Example:
   #   escapeExecline [ "if" [ "somecommand" ] "true" ]
   #   == ''"if" { "somecommand" } "true"''
-  escapeExecline = execlineList: lib.concatStringsSep " "
-    (
+  escapeExecline =
+    execlineList:
+    lib.concatStringsSep " " (
      let
-        go = arg:
-          if builtins.isString arg then [ (escapeExeclineArg arg) ]
-          else if builtins.isPath arg then [ (escapeExeclineArg "${arg}") ]
-          else if lib.isDerivation arg then [ (escapeExeclineArg arg) ]
-          else if builtins.isList arg then [ "{" ] ++ builtins.concatMap go arg ++ [ "}" ]
-          else abort "escapeExecline can only hande nested lists of strings, was ${lib.generators.toPretty {} arg}";
+        go =
+          arg:
+          if builtins.isString arg then
+            [ (escapeExeclineArg arg) ]
+          else if builtins.isPath arg then
+            [ (escapeExeclineArg "${arg}") ]
+          else if lib.isDerivation arg then
+            [ (escapeExeclineArg arg) ]
+          else if builtins.isList arg then
+            [ "{" ] ++ builtins.concatMap go arg ++ [ "}" ]
+          else
+            abort "escapeExecline can only hande nested lists of strings, was ${
+              lib.generators.toPretty { } arg
+            }";
      in
      builtins.concatMap go execlineList
    );


@@ -17,75 +17,82 @@ let
   # Create the case statement for a command invocations, optionally
   # overriding the `TARGET_TOOL` variable.
-  invoke = name: { attr, cmd ? null }: ''
-    ${name})
-    attr="${attr}"
-    ${if cmd != null then "TARGET_TOOL=\"${cmd}\"\n;;" else ";;"}
-  '';
+  invoke =
+    name:
+    {
+      attr,
+      cmd ? null,
+    }:
+    ''
+      ${name})
+      attr="${attr}"
+      ${if cmd != null then "TARGET_TOOL=\"${cmd}\"\n;;" else ";;"}
+    '';
 
   # Create command to symlink to the dispatch script for each tool.
   link = name: "ln -s $target $out/bin/${name}";
 
   invocations = tools: concatStringsSep "\n" (attrValues (mapAttrs invoke tools));
 in
 
-fix (self:
+fix (
+  self:
   # Attribute set of tools that should be lazily-added to the $PATH.
   #
   # The name of each attribute is used as the command name (on $PATH).
   # It must contain the keys 'attr' (containing the Nix attribute path
   # to the tool's derivation from the top-level), and may optionally
   # contain the key 'cmd' to override the name of the binary inside the
   # derivation.
   tools:
   pkgs.runCommandNoCC "lazy-dispatch"
    {
      passthru.overrideDeps = newTools: self (tools // newTools);
      passthru.tools = tools;
 
      text = ''
        #!${pkgs.runtimeShell}
        set -ue
 
        if ! type git>/dev/null || ! type nix-build>/dev/null; then
          echo "The 'git' and 'nix-build' commands must be available." >&2
          exit 127
        fi
 
        readonly REPO_ROOT=$(git rev-parse --show-toplevel)
        TARGET_TOOL=$(basename "$0")
 
        case "''${TARGET_TOOL}" in
        ${invocations tools}
        *)
          echo "''${TARGET_TOOL} is currently not installed in this repository." >&2
          exit 127
          ;;
        esac
 
        result=$(nix-build --no-out-link --attr "''${attr}" "''${REPO_ROOT}")
        PATH="''${result}/bin:$PATH"
        exec "''${TARGET_TOOL}" "''${@}"
      '';
 
      # Access this to get a compatible nix-shell
      passthru.devShell = pkgs.mkShellNoCC {
        name = "${self.name}-shell";
        packages = [ self ];
      };
    }
    ''
      # Write the dispatch code
      target=$out/bin/__dispatch
      mkdir -p "$(dirname "$target")"
      echo "$text" > $target
      chmod +x $target
 
      # Add symlinks from all the tools to the dispatch
      ${concatStringsSep "\n" (map link (attrNames tools))}
 
      # Check that it's working-ish
      ${pkgs.stdenv.shellDryRun} $target
    ''
 )


@ -32,25 +32,30 @@ let
map map
match match
readDir readDir
substring; substring
;
argsWithPath = args: parts: argsWithPath =
let meta.locatedAt = parts; args: parts:
in meta // (if isAttrs args then args else args meta); let
meta.locatedAt = parts;
in
meta // (if isAttrs args then args else args meta);
readDirVisible = path: readDirVisible =
path:
let let
children = readDir path; children = readDir path;
# skip hidden files, except for those that contain special instructions to readTree # skip hidden files, except for those that contain special instructions to readTree
isVisible = f: f == ".skip-subtree" || f == ".skip-tree" || (substring 0 1 f) != "."; isVisible = f: f == ".skip-subtree" || f == ".skip-tree" || (substring 0 1 f) != ".";
names = filter isVisible (attrNames children); names = filter isVisible (attrNames children);
in in
listToAttrs (map listToAttrs (
(name: { map (name: {
inherit name; inherit name;
value = children.${name}; value = children.${name};
}) }) names
names); );
# Create a mark containing the location of this attribute and # Create a mark containing the location of this attribute and
# a list of all child attribute names added by readTree. # a list of all child attribute names added by readTree.
@ -60,39 +65,48 @@ let
}; };
# Create a label from a target's tree location. # Create a label from a target's tree location.
mkLabel = target: mkLabel =
let label = concatStringsSep "/" target.__readTree; target:
in if target ? __subtarget let
then "${label}:${target.__subtarget}" label = concatStringsSep "/" target.__readTree;
else label; in
if target ? __subtarget then "${label}:${target.__subtarget}" else label;
# Merge two attribute sets, but place attributes in `passthru` via # Merge two attribute sets, but place attributes in `passthru` via
# `overrideAttrs` for derivation targets that support it. # `overrideAttrs` for derivation targets that support it.
merge = a: b: merge =
if a ? overrideAttrs a: b:
then if a ? overrideAttrs then
a.overrideAttrs a.overrideAttrs (prev: {
(prev: { passthru = (prev.passthru or { }) // b;
passthru = (prev.passthru or { }) // b; })
}) else
else a // b; a // b;
# Import a file and enforce our calling convention # Import a file and enforce our calling convention
importFile = args: scopedArgs: path: parts: filter: importFile =
args: scopedArgs: path: parts: filter:
let let
importedFile = importedFile =
if scopedArgs != { } && builtins ? scopedImport # For snix if
then builtins.scopedImport scopedArgs path scopedArgs != { } && builtins ? scopedImport # For snix
else import path; then
builtins.scopedImport scopedArgs path
else
import path;
pathType = builtins.typeOf importedFile; pathType = builtins.typeOf importedFile;
in in
if pathType != "lambda" if pathType != "lambda" then
then throw "readTree: trying to import ${toString path}, but its a ${pathType}, you need to make it a function like { depot, pkgs, ... }" throw "readTree: trying to import ${toString path}, but its a ${pathType}, you need to make it a function like { depot, pkgs, ... }"
else importedFile (filter parts (argsWithPath args parts)); else
importedFile (filter parts (argsWithPath args parts));
nixFileName = file: nixFileName =
let res = match "(.*)\\.nix" file; file:
in if res == null then null else head res; let
res = match "(.*)\\.nix" file;
in
if res == null then null else head res;
# Internal implementation of readTree, which handles things like the # Internal implementation of readTree, which handles things like the
# skipping of trees and subtrees. # skipping of trees and subtrees.
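As a quick illustration of `mkLabel` from the hunk above, with hand-written target sets (the locations are made up):

    mkLabel { __readTree = [ "ops" "machines" ]; }
    # => "ops/machines"
    mkLabel { __readTree = [ "ops" "machines" ]; __subtarget = "deps"; }
    # => "ops/machines:deps"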
@ -105,7 +119,15 @@ let
# The higher-level `readTree` method assembles the final attribute # The higher-level `readTree` method assembles the final attribute
# set out of these results at the top-level, and the internal # set out of these results at the top-level, and the internal
# `children` implementation unwraps and processes nested trees. # `children` implementation unwraps and processes nested trees.
readTreeImpl = { args, initPath, rootDir, parts, argsFilter, scopedArgs }: readTreeImpl =
{
args,
initPath,
rootDir,
parts,
argsFilter,
scopedArgs,
}:
let let
dir = readDirVisible initPath; dir = readDirVisible initPath;
@ -123,9 +145,10 @@ let
joinChild = c: initPath + ("/" + c); joinChild = c: initPath + ("/" + c);
self = self =
if rootDir if rootDir then
then { __readTree = [ ]; } { __readTree = [ ]; }
else importFile (args // { here = result; }) scopedArgs initPath parts argsFilter; else
importFile (args // { here = result; }) scopedArgs initPath parts argsFilter;
# Import subdirectories of the current one, unless any skip # Import subdirectories of the current one, unless any skip
# instructions exist. # instructions exist.
@ -134,88 +157,93 @@ let
# should be ignored, but its content is not inspected by # should be ignored, but its content is not inspected by
# readTree # readTree
filterDir = f: dir."${f}" == "directory"; filterDir = f: dir."${f}" == "directory";
filteredChildren = map filteredChildren = map (c: {
(c: { name = c;
name = c; value = readTreeImpl {
value = readTreeImpl { inherit argsFilter scopedArgs;
inherit argsFilter scopedArgs; args = args;
args = args; initPath = (joinChild c);
initPath = (joinChild c); rootDir = false;
rootDir = false; parts = (parts ++ [ c ]);
parts = (parts ++ [ c ]); };
}; }) (filter filterDir (attrNames dir));
})
(filter filterDir (attrNames dir));
# Remove skipped children from the final set, and unwrap the # Remove skipped children from the final set, and unwrap the
# result set. # result set.
children = children =
if skipSubtree then [ ] if skipSubtree then
else map ({ name, value }: { inherit name; value = value.ok; }) (filter (child: child.value ? ok) filteredChildren); [ ]
else
map (
{ name, value }:
{
inherit name;
value = value.ok;
}
) (filter (child: child.value ? ok) filteredChildren);
# Import Nix files # Import Nix files
nixFiles = nixFiles = if skipSubtree then [ ] else filter (f: f != null) (map nixFileName (attrNames dir));
if skipSubtree then [ ] nixChildren = map (
else filter (f: f != null) (map nixFileName (attrNames dir)); c:
nixChildren = map let
(c: p = joinChild (c + ".nix");
let childParts = parts ++ [ c ];
p = joinChild (c + ".nix"); imported = importFile (args // { here = result; }) scopedArgs p childParts argsFilter;
childParts = parts ++ [ c ]; in
imported = importFile (args // { here = result; }) scopedArgs p childParts argsFilter; {
in name = c;
{ value = if isAttrs imported then merge imported (marker childParts { }) else imported;
name = c; }
value = ) nixFiles;
if isAttrs imported
then merge imported (marker childParts { })
else imported;
})
nixFiles;
nodeValue = if dir ? "default.nix" then self else { }; nodeValue = if dir ? "default.nix" then self else { };
allChildren = listToAttrs ( allChildren = listToAttrs (if dir ? "default.nix" then children else nixChildren ++ children);
if dir ? "default.nix"
then children
else nixChildren ++ children
);
result = result =
if isAttrs nodeValue if isAttrs nodeValue then
then merge nodeValue (allChildren // (marker parts allChildren)) merge nodeValue (allChildren // (marker parts allChildren))
else nodeValue; else
nodeValue;
in in
if skipTree if skipTree then
then { skip = true; } { skip = true; }
else { else
ok = result; {
}; ok = result;
};
# Top-level implementation of readTree itself. # Top-level implementation of readTree itself.
readTree = args: readTree =
args:
let let
tree = readTreeImpl args; tree = readTreeImpl args;
in in
if tree ? skip if tree ? skip then
then throw "Top-level folder has a .skip-tree marker and could not be read by readTree!" throw "Top-level folder has a .skip-tree marker and could not be read by readTree!"
else tree.ok; else
tree.ok;
# Helper function to fetch subtargets from a target. This is a # Helper function to fetch subtargets from a target. This is a
# temporary helper to warn on the use of the `meta.targets` # temporary helper to warn on the use of the `meta.targets`
# attribute, which is deprecated in favour of `meta.ci.targets`. # attribute, which is deprecated in favour of `meta.ci.targets`.
subtargets = node: subtargets =
let targets = (node.meta.targets or [ ]) ++ (node.meta.ci.targets or [ ]); node:
in if node ? meta.targets then let
targets = (node.meta.targets or [ ]) ++ (node.meta.ci.targets or [ ]);
in
if node ? meta.targets then
builtins.trace '' builtins.trace ''
Warning: The meta.targets attribute is deprecated. Warning: The meta.targets attribute is deprecated.
Please move the subtargets of //${mkLabel node} to the Please move the subtargets of //${mkLabel node} to the
meta.ci.targets attribute. meta.ci.targets attribute.
 
'' '' targets
targets else targets; else
targets;
# Function which can be used to find all readTree targets within an # Function which can be used to find all readTree targets within an
# attribute set. # attribute set.
@ -231,23 +259,29 @@ let
# #
# eligible: Function to determine whether the given derivation # eligible: Function to determine whether the given derivation
# should be included in the build. # should be included in the build.
gather = eligible: node: gather =
eligible: node:
if node ? __readTree then if node ? __readTree then
# Include the node itself if it is eligible. # Include the node itself if it is eligible.
(if eligible node then [ node ] else [ ]) (if eligible node then [ node ] else [ ])
# Include eligible children of the node # Include eligible children of the node
++ concatMap (gather eligible) (map (attr: node."${attr}") node.__readTreeChildren) ++ concatMap (gather eligible) (map (attr: node."${attr}") node.__readTreeChildren)
# Include specified sub-targets of the node # Include specified sub-targets of the node
++ filter eligible (map ++ filter eligible (
(k: (node."${k}" or { }) // { map (
# Keep the same tree location, but explicitly mark this k:
# node as a subtarget. (node."${k}" or { })
__readTree = node.__readTree; // {
__readTreeChildren = [ ]; # Keep the same tree location, but explicitly mark this
__subtarget = k; # node as a subtarget.
}) __readTree = node.__readTree;
(subtargets node)) __readTreeChildren = [ ];
else [ ]; __subtarget = k;
}
) (subtargets node)
)
else
[ ];
# Determine whether a given value is a derivation. # Determine whether a given value is a derivation.
# Copied from nixpkgs/lib for cases where lib is not available yet. # Copied from nixpkgs/lib for cases where lib is not available yet.
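A sketch of how `gather` is typically driven (`someReadTreeResult` stands in for any attrset produced by readTree; `isDerivation` is the helper defined below):

    # Collect every derivation target in a readTree result, then label it.
    targets = gather isDerivation someReadTreeResult;
    labels = map mkLabel targets;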
@ -256,12 +290,14 @@ in
{ {
inherit gather mkLabel; inherit gather mkLabel;
__functor = _: __functor =
{ path _:
, args {
, filter ? (_parts: x: x) path,
, scopedArgs ? { } args,
, rootDir ? true filter ? (_parts: x: x),
scopedArgs ? { },
rootDir ? true,
}: }:
readTree { readTree {
inherit args scopedArgs rootDir; inherit args scopedArgs rootDir;
@ -285,43 +321,56 @@ in
# which should be able to access the restricted folder. # which should be able to access the restricted folder.
# #
# reason: Textual explanation for the restriction (included in errors) # reason: Textual explanation for the restriction (included in errors)
restrictFolder = { folder, exceptions ? [ ], reason }: parts: args: restrictFolder =
if (elemAt parts 0) == folder || elem parts exceptions {
then args folder,
else args // { exceptions ? [ ],
depot = args.depot // { reason,
"${folder}" = throw '' }:
Access to targets under //${folder} is not permitted from parts: args:
other repository paths. Specific exceptions are configured if (elemAt parts 0) == folder || elem parts exceptions then
at the top-level. args
else
args
// {
depot = args.depot // {
"${folder}" = throw ''
Access to targets under //${folder} is not permitted from
other repository paths. Specific exceptions are configured
at the top-level.
${reason} ${reason}
At location: ${builtins.concatStringsSep "." parts} At location: ${builtins.concatStringsSep "." parts}
''; '';
};
}; };
};
# This definition of fix is identical to <nixpkgs>.lib.fix, but is # This definition of fix is identical to <nixpkgs>.lib.fix, but is
# provided here for cases where readTree is used before nixpkgs can # provided here for cases where readTree is used before nixpkgs can
# be imported. # be imported.
# #
# It is often required to create the args attribute set. # It is often required to create the args attribute set.
fix = f: let x = f x; in x; fix =
f:
let
x = f x;
in
x;
# Takes an attribute set and adds a meta.ci.targets attribute to it # Takes an attribute set and adds a meta.ci.targets attribute to it
# which contains all direct children of the attribute set which are # which contains all direct children of the attribute set which are
# derivations. # derivations.
# #
# Type: attrs -> attrs # Type: attrs -> attrs
drvTargets = attrs: drvTargets =
attrs // { attrs:
attrs
// {
# preserve .meta from original attrs # preserve .meta from original attrs
meta = (attrs.meta or { }) // { meta = (attrs.meta or { }) // {
# preserve .meta.ci (except .targets) from original attrs # preserve .meta.ci (except .targets) from original attrs
ci = (attrs.meta.ci or { }) // { ci = (attrs.meta.ci or { }) // {
targets = builtins.filter targets = builtins.filter (x: isDerivation attrs."${x}") (builtins.attrNames attrs);
(x: isDerivation attrs."${x}")
(builtins.attrNames attrs);
}; };
}; };
}; };
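A sketch of `drvTargets` from the hunk above (`someDrv` stands in for any derivation):

    drvTargets {
      foo = someDrv;     # a derivation, so it is listed as a CI target
      docs = "a string"; # not a derivation, filtered out
    }
    # => the same attrset, plus meta.ci.targets = [ "foo" ];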


@ -1,18 +1,26 @@
{ depot, lib, pkgs, ... }: {
depot,
lib,
pkgs,
...
}:
let let
inherit (lib) partition optionalAttrs any; inherit (lib) partition optionalAttrs any;
inherit (builtins) tryEval; inherit (builtins) tryEval;
it = msg: asserts: it =
msg: asserts:
let let
results = partition (a: a.ok) asserts; results = partition (a: a.ok) asserts;
in in
{ {
_it = msg; _it = msg;
} // optionalAttrs (results.right != [ ]) { }
// optionalAttrs (results.right != [ ]) {
passes = map (result: result.test) results.right; passes = map (result: result.test) results.right;
} // optionalAttrs (results.wrong != [ ]) { }
// optionalAttrs (results.wrong != [ ]) {
fails = map (result: result.test) results.wrong; fails = map (result: result.test) results.wrong;
}; };
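A sketch of the test helpers above in use; the assertion names are arbitrary:

    it "does basic arithmetic" [
      (assertEq "addition" (1 + 1) 2)
      (assertEq "subtraction" (5 - 3) 2)
    ]
    # => { _it = "does basic arithmetic"; passes = [ "addition" "subtraction" ]; }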
@ -21,16 +29,18 @@ let
ok = a == b; ok = a == b;
}; };
assertThrows = test: value: assertThrows =
test: value:
let let
value' = tryEval value; value' = tryEval value;
in in
{ {
inherit test; inherit test;
ok = ! value'.success; ok = !value'.success;
}; };
runTestsuite = name: its: runTestsuite =
name: its:
let let
fails = any (it': it' ? fails) its; fails = any (it': it' ? fails) its;
in in
@ -42,11 +52,14 @@ let
inherit its; inherit its;
} }
( (
if fails then '' if fails then
jq '.its' < .attrs.json ''
'' else '' jq '.its' < .attrs.json
jq '.its' < .attrs.json > $out ''
'' else
''
jq '.its' < .attrs.json > $out
''
); );
tree-ex = depot.nix.readTree { tree-ex = depot.nix.readTree {
@ -55,25 +68,14 @@ let
}; };
example = it "corresponds to the README example" [ example = it "corresponds to the README example" [
(assertEq "third_party attrset" (assertEq "third_party attrset" (
(lib.isAttrs tree-ex.third_party lib.isAttrs tree-ex.third_party && (!lib.isDerivation tree-ex.third_party)
&& (! lib.isDerivation tree-ex.third_party)) ) true)
true) (assertEq "third_party attrset other attribute" tree-ex.third_party.favouriteColour "orange")
(assertEq "third_party attrset other attribute" (assertEq "rustpkgs attrset aho-corasick" tree-ex.third_party.rustpkgs.aho-corasick "aho-corasick")
tree-ex.third_party.favouriteColour (assertEq "rustpkgs attrset serde" tree-ex.third_party.rustpkgs.serde "serde")
"orange") (assertEq "tools cheddear" "cheddar" tree-ex.tools.cheddar)
(assertEq "rustpkgs attrset aho-corasick" (assertEq "tools roquefort" tree-ex.tools.roquefort "roquefort")
tree-ex.third_party.rustpkgs.aho-corasick
"aho-corasick")
(assertEq "rustpkgs attrset serde"
tree-ex.third_party.rustpkgs.serde
"serde")
(assertEq "tools cheddear"
"cheddar"
tree-ex.tools.cheddar)
(assertEq "tools roquefort"
tree-ex.tools.roquefort
"roquefort")
]; ];
tree-tl = depot.nix.readTree { tree-tl = depot.nix.readTree {
@ -82,65 +84,64 @@ let
}; };
traversal-logic = it "corresponds to the traversal logic in the README" [ traversal-logic = it "corresponds to the traversal logic in the README" [
(assertEq "skip-tree/a is read" (assertEq "skip-tree/a is read" tree-tl.skip-tree.a "a is read normally")
tree-tl.skip-tree.a (assertEq "skip-tree does not contain b" (builtins.attrNames tree-tl.skip-tree) [
"a is read normally") "__readTree"
(assertEq "skip-tree does not contain b" "__readTreeChildren"
(builtins.attrNames tree-tl.skip-tree) "a"
[ "__readTree" "__readTreeChildren" "a" ]) ])
(assertEq "skip-tree children list does not contain b" (assertEq "skip-tree children list does not contain b" tree-tl.skip-tree.__readTreeChildren [ "a" ])
tree-tl.skip-tree.__readTreeChildren
[ "a" ])
(assertEq "skip subtree default.nix is read" (assertEq "skip subtree default.nix is read" tree-tl.skip-subtree.but
tree-tl.skip-subtree.but "the default.nix is still read"
"the default.nix is still read") )
(assertEq "skip subtree a/default.nix is skipped" (assertEq "skip subtree a/default.nix is skipped" (tree-tl.skip-subtree ? a) false)
(tree-tl.skip-subtree ? a) (assertEq "skip subtree b/c.nix is skipped" (tree-tl.skip-subtree ? b) false)
false)
(assertEq "skip subtree b/c.nix is skipped"
(tree-tl.skip-subtree ? b)
false)
(assertEq "skip subtree a/default.nix would be read without .skip-subtree" (assertEq "skip subtree a/default.nix would be read without .skip-subtree"
(tree-tl.no-skip-subtree.a) (tree-tl.no-skip-subtree.a)
"am I subtree yet?") "am I subtree yet?"
(assertEq "skip subtree b/c.nix would be read without .skip-subtree" )
(tree-tl.no-skip-subtree.b.c) (assertEq "skip subtree b/c.nix would be read without .skip-subtree" (tree-tl.no-skip-subtree.b.c
"cool") ) "cool")
(assertEq "default.nix attrset is merged with siblings" (assertEq "default.nix attrset is merged with siblings" tree-tl.default-nix.no
tree-tl.default-nix.no "siblings should be read"
"siblings should be read") )
(assertEq "default.nix means sibling isnt read" (assertEq "default.nix means sibling isnt read" (tree-tl.default-nix ? sibling) false)
(tree-tl.default-nix ? sibling)
false)
(assertEq "default.nix means subdirs are still read and merged into default.nix" (assertEq "default.nix means subdirs are still read and merged into default.nix"
(tree-tl.default-nix.subdir.a) (tree-tl.default-nix.subdir.a)
"but Im picked up") "but Im picked up"
)
(assertEq "default.nix can be not an attrset" (assertEq "default.nix can be not an attrset" tree-tl.default-nix.no-merge
tree-tl.default-nix.no-merge "I'm not merged with any children"
"Im not merged with any children") )
(assertEq "default.nix is not an attrset -> children are not merged" (assertEq "default.nix is not an attrset -> children are not merged" (
(tree-tl.default-nix.no-merge ? subdir) tree-tl.default-nix.no-merge ? subdir
false) ) false)
(assertEq "default.nix can contain a derivation" (assertEq "default.nix can contain a derivation" (lib.isDerivation tree-tl.default-nix.can-be-drv)
(lib.isDerivation tree-tl.default-nix.can-be-drv) true
true) )
(assertEq "Even if default.nix is a derivation, children are traversed and merged" (assertEq "Even if default.nix is a derivation, children are traversed and merged"
tree-tl.default-nix.can-be-drv.subdir.a tree-tl.default-nix.can-be-drv.subdir.a
"Picked up through the drv") "Picked up through the drv"
(assertEq "default.nix drv is not changed by readTree" )
tree-tl.default-nix.can-be-drv (assertEq "default.nix drv is not changed by readTree" tree-tl.default-nix.can-be-drv (
(import ./test-tree-traversal/default-nix/can-be-drv/default.nix { })) import ./test-tree-traversal/default-nix/can-be-drv/default.nix { }
))
(assertEq "`here` argument represents the attrset a given file is part of" (assertEq "`here` argument represents the attrset a given file is part of"
(builtins.removeAttrs tree-tl.here-arg [ "__readTree" "__readTreeChildren" "subdir" ]) (builtins.removeAttrs tree-tl.here-arg [
"__readTree"
"__readTreeChildren"
"subdir"
])
{ {
attr1 = "foo"; attr1 = "foo";
attr2 = "foo"; attr2 = "foo";
attr3 = "sibl1"; attr3 = "sibl1";
}) }
)
]; ];
# these each call readTree themselves because the throws have to happen inside assertThrows # these each call readTree themselves because the throws have to happen inside assertThrows
@ -149,7 +150,8 @@ let
(depot.nix.readTree { (depot.nix.readTree {
path = ./test-wrong-not-a-function; path = ./test-wrong-not-a-function;
args = { }; args = { };
}).not-a-function) }).not-a-function
)
# cant test for that, assertThrows cant catch this error # cant test for that, assertThrows cant catch this error
# (assertThrows "this file is a function but doesn't have dots" # (assertThrows "this file is a function but doesn't have dots"
# (depot.nix.readTree {} ./test-wrong-no-dots).no-dots-in-function) # (depot.nix.readTree {} ./test-wrong-no-dots).no-dots-in-function)
@ -160,22 +162,36 @@ let
args = { }; args = { };
}; };
assertMarkerByPath = path: assertMarkerByPath =
path:
assertEq "${lib.concatStringsSep "." path} is marked correctly" assertEq "${lib.concatStringsSep "." path} is marked correctly"
(lib.getAttrFromPath path read-markers).__readTree (lib.getAttrFromPath path read-markers).__readTree
path; path;
markers = it "marks nodes correctly" [ markers = it "marks nodes correctly" [
(assertMarkerByPath [ "directory-marked" ]) (assertMarkerByPath [ "directory-marked" ])
(assertMarkerByPath [ "directory-marked" "nested" ]) (assertMarkerByPath [
(assertMarkerByPath [ "file-children" "one" ]) "directory-marked"
(assertMarkerByPath [ "file-children" "two" ]) "nested"
(assertEq "nix file children are marked correctly" ])
read-markers.file-children.__readTreeChildren [ "one" "two" ]) (assertMarkerByPath [
(assertEq "directory children are marked correctly" "file-children"
read-markers.directory-marked.__readTreeChildren [ "nested" ]) "one"
(assertEq "absence of children is marked" ])
read-markers.directory-marked.nested.__readTreeChildren [ ]) (assertMarkerByPath [
"file-children"
"two"
])
(assertEq "nix file children are marked correctly" read-markers.file-children.__readTreeChildren [
"one"
"two"
])
(assertEq "directory children are marked correctly" read-markers.directory-marked.__readTreeChildren
[ "nested" ]
)
(assertEq "absence of children is marked" read-markers.directory-marked.nested.__readTreeChildren
[ ]
)
]; ];
in in


@ -1,3 +1,4 @@
{ ... }: { ... }:
{ } {
}


@ -1,3 +1,4 @@
{ ... }: { ... }:
{ } {
}


@ -1,3 +1,4 @@
{ ... }: { ... }:
{ } {
}


@ -1,3 +1,4 @@
{ ... }: { ... }:
{ } {
}


@ -3,5 +3,8 @@ derivation {
name = "im-a-drv"; name = "im-a-drv";
system = builtins.currentSystem; system = builtins.currentSystem;
builder = "/bin/sh"; builder = "/bin/sh";
args = [ "-c" ''echo "" > $out'' ]; args = [
"-c"
''echo "" > $out''
];
} }


@ -1,4 +1,5 @@
{ here, ... }: { { here, ... }:
{
attr1 = "foo"; attr1 = "foo";
attr2 = here.attr1; attr2 = here.attr1;


@ -1,2 +1 @@
{ here, ... }: { here, ... }: here.sibl1
here.sibl1


@ -1,3 +1,3 @@
{}: { }:
"This is a function, but readTree wants to pass a bunch of arguments, and not having dots means we depend on exactly which arguments." "This is a function, but readTree wants to pass a bunch of arguments, and not having dots means we depend on exactly which arguments."


@ -15,19 +15,19 @@
{ {
# root path to use as a reference point # root path to use as a reference point
root root,
, # list of paths below `root` that should be # list of paths below `root` that should be
# included in the resulting directory # included in the resulting directory
# #
# If path, need to refer to the actual file / directory to be included. # If path, need to refer to the actual file / directory to be included.
# If a string, it is treated as a string relative to the root. # If a string, it is treated as a string relative to the root.
paths paths,
, # (optional) name to use for the derivation # (optional) name to use for the derivation
# #
# This should always be set when using roots that do not have # This should always be set when using roots that do not have
# controlled names, such as when passing the top-level of a git # controlled names, such as when passing the top-level of a git
# repository (e.g. `depot.path.origSrc`). # repository (e.g. `depot.path.origSrc`).
name ? builtins.baseNameOf root name ? builtins.baseNameOf root,
}: }:
let let
@ -36,12 +36,14 @@ let
# Count slashes in a path. # Count slashes in a path.
# #
# Type: path -> int # Type: path -> int
depth = path: lib.pipe path [ depth =
toString path:
(builtins.split "/") lib.pipe path [
(builtins.filter builtins.isList) toString
builtins.length (builtins.split "/")
]; (builtins.filter builtins.isList)
builtins.length
];
# (Parent) directories will be created from deepest to shallowest # (Parent) directories will be created from deepest to shallowest
# which should mean no conflicts are caused unless both a child # which should mean no conflicts are caused unless both a child
@ -52,19 +54,24 @@ let
# Create a set which contains the source path to copy / symlink and # Create a set which contains the source path to copy / symlink and
# it's destination, so the path below the destination root including # it's destination, so the path below the destination root including
# a leading slash. Additionally some sanity checking is done. # a leading slash. Additionally some sanity checking is done.
makeSymlink = path: makeSymlink =
path:
let let
withLeading = p: if builtins.substring 0 1 p == "/" then p else "/" + p; withLeading = p: if builtins.substring 0 1 p == "/" then p else "/" + p;
fullPath = fullPath =
if builtins.isPath path then path if builtins.isPath path then
else if builtins.isString path then (root + withLeading path) path
else builtins.throw "Unsupported path type ${builtins.typeOf path}"; else if builtins.isString path then
(root + withLeading path)
else
builtins.throw "Unsupported path type ${builtins.typeOf path}";
strPath = toString fullPath; strPath = toString fullPath;
contextPath = "${fullPath}"; contextPath = "${fullPath}";
belowRoot = builtins.substring rootLength (-1) strPath; belowRoot = builtins.substring rootLength (-1) strPath;
prefix = builtins.substring 0 rootLength strPath; prefix = builtins.substring 0 rootLength strPath;
in in
assert toString root == prefix; { assert toString root == prefix;
{
src = contextPath; src = contextPath;
dst = belowRoot; dst = belowRoot;
}; };
@ -73,12 +80,13 @@ let
in in
# TODO(sterni): teach readTree to also read symlinked directories, # TODO(sterni): teach readTree to also read symlinked directories,
# so we ln -sT instead of cp -aT. # so we ln -sT instead of cp -aT.
pkgs.runCommand "sparse-${name}" { } ( pkgs.runCommand "sparse-${name}" { } (
lib.concatMapStrings lib.concatMapStrings (
({ src, dst }: '' { src, dst }:
''
mkdir -p "$(dirname "$out${dst}")" mkdir -p "$(dirname "$out${dst}")"
cp -aT --reflink=auto "${src}" "$out${dst}" cp -aT --reflink=auto "${src}" "$out${dst}"
'') ''
symlinks ) symlinks
) )
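A hypothetical call to this sparse-tree builder (the `sparseTree` name and the chosen paths are assumptions for the example):

    sparseTree {
      root = depot.path.origSrc;
      name = "depot-sparse";
      paths = [
        ./README.md             # path values are used as-is
        "third_party/nixpkgs"   # strings are resolved relative to `root`
      ];
    }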


@ -10,26 +10,23 @@ rec {
# #
# Type: stateMonad s a -> (a -> stateMonad s b) -> stateMonad s b # Type: stateMonad s a -> (a -> stateMonad s b) -> stateMonad s b
bind = action: f: state: bind =
action: f: state:
let let
afterAction = action state; afterAction = action state;
in in
(f afterAction.value) afterAction.state; (f afterAction.value) afterAction.state;
# Type: stateMonad s a -> stateMonad s b -> stateMonad s b # Type: stateMonad s a -> stateMonad s b -> stateMonad s b
after = action1: action2: state: action2 (action1 state).state; after =
action1: action2: state:
action2 (action1 state).state;
# Type: stateMonad s (stateMonad s a) -> stateMonad s a # Type: stateMonad s (stateMonad s a) -> stateMonad s a
join = action: bind action (action': action'); join = action: bind action (action': action');
# Type: [a] -> (a -> stateMonad s b) -> stateMonad s null # Type: [a] -> (a -> stateMonad s b) -> stateMonad s null
for_ = xs: f: for_ = xs: f: builtins.foldl' (laterAction: x: after (f x) laterAction) (pure null) xs;
builtins.foldl'
(laterAction: x:
after (f x) laterAction
)
(pure null)
xs;
# #
# Applicative # Applicative
@ -52,10 +49,16 @@ rec {
# #
# Type: (s -> s) -> stateMonad s null # Type: (s -> s) -> stateMonad s null
modify = f: state: { value = null; state = f state; }; modify = f: state: {
value = null;
state = f state;
};
# Type: stateMonad s s # Type: stateMonad s s
get = state: { value = state; inherit state; }; get = state: {
value = state;
inherit state;
};
# Type: s -> stateMonad s null # Type: s -> stateMonad s null
set = new: modify (_: new); set = new: modify (_: new);
@ -64,9 +67,15 @@ rec {
getAttr = attr: fmap (state: state.${attr}) get; getAttr = attr: fmap (state: state.${attr}) get;
# Type: str -> (any -> any) -> stateMonad s null # Type: str -> (any -> any) -> stateMonad s null
modifyAttr = attr: f: modify (state: state // { modifyAttr =
${attr} = f state.${attr}; attr: f:
}); modify (
state:
state
// {
${attr} = f state.${attr};
}
);
# Type: str -> any -> stateMonad s null # Type: str -> any -> stateMonad s null
setAttr = attr: value: modifyAttr attr (_: value); setAttr = attr: value: modifyAttr attr (_: value);
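Since a state action is just a function from the current state to `{ value, state }`, it can be run by applying it to an initial state. A sketch combining the helpers above (assuming the usual `fmap` from the Functor section, which `getAttr` relies on):

    (after
      (setAttr "count" 1)
      (bind (getAttr "count") (n: setAttr "count" (n + 1)))
    ) { count = 0; }
    # => { value = null; state = { count = 2 }; }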


@ -3,28 +3,33 @@ let
# Takes a tag, checks whether it is an attrset with one element, # Takes a tag, checks whether it is an attrset with one element,
# if so sets `isTag` to `true` and sets the name and value. # if so sets `isTag` to `true` and sets the name and value.
# If not, sets `isTag` to `false` and sets `errmsg`. # If not, sets `isTag` to `false` and sets `errmsg`.
verifyTag = tag: verifyTag =
tag:
let let
cases = builtins.attrNames tag; cases = builtins.attrNames tag;
len = builtins.length cases; len = builtins.length cases;
in in
if builtins.length cases == 1 if builtins.length cases == 1 then
then let
let name = builtins.head cases; in { name = builtins.head cases;
in
{
isTag = true; isTag = true;
name = name; name = name;
val = tag.${name}; val = tag.${name};
errmsg = null; errmsg = null;
} }
else { else
isTag = false; {
errmsg = isTag = false;
("match: an instance of a sum is an attrset " errmsg = (
"match: an instance of a sum is an attrset "
+ "with exactly one element, yours had ${toString len}" + "with exactly one element, yours had ${toString len}"
+ ", namely: ${lib.generators.toPretty {} cases}"); + ", namely: ${lib.generators.toPretty { } cases}"
name = null; );
val = null; name = null;
}; val = null;
};
# Returns the tag name of a given tag attribute set. # Returns the tag name of a given tag attribute set.
# Throws if the tag is invalid. # Throws if the tag is invalid.
@ -39,11 +44,15 @@ let
tagValue = tag: (assertIsTag tag).val; tagValue = tag: (assertIsTag tag).val;
# like `verifyTag`, but throws the error message if it is not a tag. # like `verifyTag`, but throws the error message if it is not a tag.
assertIsTag = tag: assertIsTag =
let res = verifyTag tag; in tag:
let
res = verifyTag tag;
in
assert res.isTag || throw res.errmsg; assert res.isTag || throw res.errmsg;
{ inherit (res) name val; }; {
inherit (res) name val;
};
# Discriminator for values. # Discriminator for values.
# Goes through a list of tagged predicates `{ <tag> = <pred>; }` # Goes through a list of tagged predicates `{ <tag> = <pred>; }`
@ -64,22 +73,22 @@ let
# { negative = i: i < 0; } # { negative = i: i < 0; }
# ] 1 # ] 1
# => { smol = 1; } # => { smol = 1; }
discrDef = defTag: fs: v: discrDef =
defTag: fs: v:
let let
res = lib.findFirst res = lib.findFirst (t: t.val v) null (map assertIsTag fs);
(t: t.val v)
null
(map assertIsTag fs);
in in
if res == null if res == null then { ${defTag} = v; } else { ${res.name} = v; };
then { ${defTag} = v; }
else { ${res.name} = v; };
# Like `discrDef`, but fail if there is no match. # Like `discrDef`, but fail if there is no match.
discr = fs: v: discr =
let res = discrDef null fs v; in fs: v:
assert lib.assertMsg (res != { }) let
"tag.discr: No predicate found that matches ${lib.generators.toPretty {} v}"; res = discrDef null fs v;
in
assert lib.assertMsg (
res != { }
) "tag.discr: No predicate found that matches ${lib.generators.toPretty { } v}";
res; res;
# The canonical pattern matching primitive. # The canonical pattern matching primitive.
@ -104,20 +113,27 @@ let
# match success matcher == 43 # match success matcher == 43
# && match failure matcher == 0; # && match failure matcher == 0;
# #
match = sum: matcher: match =
let cases = builtins.attrNames sum; sum: matcher:
in assert let
let len = builtins.length cases; in cases = builtins.attrNames sum;
lib.assertMsg (len == 1) in
("match: an instance of a sum is an attrset " assert
let
len = builtins.length cases;
in
lib.assertMsg (len == 1) (
"match: an instance of a sum is an attrset "
+ "with exactly one element, yours had ${toString len}" + "with exactly one element, yours had ${toString len}"
+ ", namely: ${lib.generators.toPretty {} cases}"); + ", namely: ${lib.generators.toPretty { } cases}"
let case = builtins.head cases; );
in assert let
lib.assertMsg (matcher ? ${case}) case = builtins.head cases;
("match: \"${case}\" is not a valid case of this sum, " in
+ "the matcher accepts: ${lib.generators.toPretty {} assert lib.assertMsg (matcher ? ${case}) (
(builtins.attrNames matcher)}"); "match: \"${case}\" is not a valid case of this sum, "
+ "the matcher accepts: ${lib.generators.toPretty { } (builtins.attrNames matcher)}"
);
matcher.${case} sum.${case}; matcher.${case} sum.${case};
# A `match` with the arguments flipped. # A `match` with the arguments flipped.
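A sketch of the basic tag helpers defined above:

    verifyTag { ok = 42; }     # => { isTag = true; name = "ok"; val = 42; errmsg = null; }
    tagName { err = "oops"; }  # => "err"
    tagValue { err = "oops"; } # => "oops"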


@ -1,100 +1,99 @@
{ depot, lib, ... }: { depot, lib, ... }:
let let
/* Get the basename of a store path without /*
the leading hash. Get the basename of a store path without
the leading hash.
Type: (path | drv | string) -> string Type: (path | drv | string) -> string
Example: Example:
storePathName ./foo.c storePathName ./foo.c
=> "foo.c" => "foo.c"
storePathName (writeText "foo.c" "int main() { return 0; }") storePathName (writeText "foo.c" "int main() { return 0; }")
=> "foo.c" => "foo.c"
storePathName "${hello}/bin/hello" storePathName "${hello}/bin/hello"
=> "hello" => "hello"
*/ */
storePathName = p: storePathName =
if lib.isDerivation p p:
then p.name if lib.isDerivation p then
else if builtins.isPath p p.name
then builtins.baseNameOf p else if builtins.isPath p then
else if builtins.isString p || (builtins.isAttrs p && (p ? outPath || p ? __toString)) builtins.baseNameOf p
then else if builtins.isString p || (builtins.isAttrs p && (p ? outPath || p ? __toString)) then
let let
strPath = toString p; strPath = toString p;
# strip leading storeDir and trailing slashes # strip leading storeDir and trailing slashes
noStoreDir = lib.removeSuffix "/" noStoreDir = lib.removeSuffix "/" (lib.removePrefix "${builtins.storeDir}/" strPath);
(lib.removePrefix "${builtins.storeDir}/" strPath);
# a basename of a child of a store path isn't really # a basename of a child of a store path isn't really
# referring to a store path, so removing the string # referring to a store path, so removing the string
# context is safe (e. g. "hello" for "${hello}/bin/hello"). # context is safe (e. g. "hello" for "${hello}/bin/hello").
basename = builtins.unsafeDiscardStringContext basename = builtins.unsafeDiscardStringContext (builtins.baseNameOf strPath);
(builtins.baseNameOf strPath);
in in
# If p is a direct child of storeDir, we need to remove # If p is a direct child of storeDir, we need to remove
# the leading hash as well to make sure that: # the leading hash as well to make sure that:
# `storePathName drv == storePathName (toString drv)`. # `storePathName drv == storePathName (toString drv)`.
if noStoreDir == basename if noStoreDir == basename then builtins.substring 33 (-1) basename else basename
then builtins.substring 33 (-1) basename else
else basename builtins.throw "Don't know how to get (base)name of " + lib.generators.toPretty { } p;
else builtins.throw "Don't know how to get (base)name of "
+ lib.generators.toPretty { } p;
/* Query the type of a path exposing the same information as would be returned by /*
`builtins.readDir`, but for a single, specific target path. Query the type of a path exposing the same information as would be returned by
`builtins.readDir`, but for a single, specific target path.
The information is returned as a tagged value, i. e. an attribute set with The information is returned as a tagged value, i. e. an attribute set with
exactly one attribute where the type of the path is encoded in the name exactly one attribute where the type of the path is encoded in the name
of the single attribute. The allowed tags and values are as follows: of the single attribute. The allowed tags and values are as follows:
* `regular`: is a regular file, always `true` if returned * `regular`: is a regular file, always `true` if returned
* `directory`: is a directory, always `true` if returned * `directory`: is a directory, always `true` if returned
* `missing`: path does not exist, always `true` if returned * `missing`: path does not exist, always `true` if returned
* `symlink`: path is a symlink, always `true` if returned * `symlink`: path is a symlink, always `true` if returned
Type: path(-like) -> tag Type: path(-like) -> tag
`tag` refers to the attribute set format of `//nix/tag`. `tag` refers to the attribute set format of `//nix/tag`.
Example: Example:
pathType ./foo.c pathType ./foo.c
=> { regular = true; } => { regular = true; }
pathType /home/lukas pathType /home/lukas
=> { directory = true; } => { directory = true; }
pathType ./result pathType ./result
=> { symlink = true; } => { symlink = true; }
pathType ./link-to-file pathType ./link-to-file
=> { symlink = true; } => { symlink = true; }
pathType /does/not/exist pathType /does/not/exist
=> { missing = true; } => { missing = true; }
# Check if a path exists # Check if a path exists
!(pathType /file ? missing) !(pathType /file ? missing)
# Check if a path is a directory or a symlink to a directory # Check if a path is a directory or a symlink to a directory
# A handy shorthand for this is provided as `realPathIsDirectory`. # A handy shorthand for this is provided as `realPathIsDirectory`.
pathType /path ? directory || (pathType /path).symlink or null == "directory" pathType /path ? directory || (pathType /path).symlink or null == "directory"
# Match on the result using //nix/tag # Match on the result using //nix/tag
nix.tag.match (nix.utils.pathType ./result) { nix.tag.match (nix.utils.pathType ./result) {
symlink = _: "symlink"; symlink = _: "symlink";
directory = _: "directory"; directory = _: "directory";
regular = _: "regular"; regular = _: "regular";
missing = _: "path does not exist"; missing = _: "path does not exist";
} }
=> "symlink" => "symlink"
# Query path type # Query path type
nix.tag.tagName (pathType /path) nix.tag.tagName (pathType /path)
*/ */
pathType = path: pathType =
path:
let let
# baseNameOf is very annoyed if we proceed with string context. # baseNameOf is very annoyed if we proceed with string context.
# We need to call toString to prevent unsafeDiscardStringContext # We need to call toString to prevent unsafeDiscardStringContext
@ -119,52 +118,56 @@ let
${thisPathType} = true; ${thisPathType} = true;
}; };
pathType' = path: pathType' =
path:
let let
p = pathType path; p = pathType path;
in in
if p ? missing if p ? missing then builtins.throw "${lib.generators.toPretty { } path} does not exist" else p;
then builtins.throw "${lib.generators.toPretty {} path} does not exist"
else p;
/* Check whether the given path is a directory. /*
Throws if the path in question doesn't exist. Check whether the given path is a directory.
Throws if the path in question doesn't exist.
Type: path(-like) -> bool Type: path(-like) -> bool
*/ */
isDirectory = path: pathType' path ? directory; isDirectory = path: pathType' path ? directory;
/* Check whether the given path is a regular file. /*
Throws if the path in question doesn't exist. Check whether the given path is a regular file.
Throws if the path in question doesn't exist.
Type: path(-like) -> bool Type: path(-like) -> bool
*/ */
isRegularFile = path: pathType' path ? regular; isRegularFile = path: pathType' path ? regular;
/* Check whether the given path is a symbolic link. /*
Throws if the path in question doesn't exist. Check whether the given path is a symbolic link.
Throws if the path in question doesn't exist.
Type: path(-like) -> bool Type: path(-like) -> bool
*/ */
isSymlink = path: pathType' path ? symlink; isSymlink = path: pathType' path ? symlink;
/* Checks whether the given value is (or contains) a reference to a /*
path that will be retained in the store path resulting from a derivation. Checks whether the given value is (or contains) a reference to a
So if isReferencablePath returns true, the given value may be used in a path that will be retained in the store path resulting from a derivation.
way that allows accessing it at runtime of any Nix-built program. So if isReferencablePath returns true, the given value may be used in a
way that allows accessing it at runtime of any Nix-built program.
Returns true for: Returns true for:
- Strings with context (whether the string is/contains a single path is not verified!) - Strings with context (whether the string is/contains a single path is not verified!)
- Path values - Path values
- Derivations - Derivations
Note that the value still needs to be used in a way that forces string context Note that the value still needs to be used in a way that forces string context
(and thus reference tracking) to be created, e.g. in string interpolation. (and thus reference tracking) to be created, e.g. in string interpolation.
Type: any -> bool Type: any -> bool
*/ */
isReferencablePath = value: isReferencablePath =
value:
builtins.isPath value builtins.isPath value
|| lib.isDerivation value || lib.isDerivation value
|| (builtins.isString value && builtins.hasContext value); || (builtins.isString value && builtins.hasContext value);
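A sketch of the predicate above (`someDrv` stands in for any derivation):

    isReferencablePath ./default.nix         # => true  (path value)
    isReferencablePath someDrv               # => true  (derivation)
    isReferencablePath "${someDrv}/bin/foo"  # => true  (string carrying context)
    isReferencablePath "/nix/store/abc-foo"  # => false (plain string, no context)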


@ -1,35 +1,55 @@
{ depot, lib, pkgs, ... }: {
depot,
lib,
pkgs,
...
}:
let let
inherit (lib) fix pipe mapAttrsToList isAttrs concatLines isString isDerivation isPath; inherit (lib)
fix
pipe
mapAttrsToList
isAttrs
concatLines
isString
isDerivation
isPath
;
inherit (depot.nix.utils) isReferencablePath; inherit (depot.nix.utils) isReferencablePath;
esc = s: lib.escapeShellArg /* ensure paths import into store */ "${s}"; esc =
s:
lib.escapeShellArg # ensure paths import into store
"${s}";
writeTreeAtPath = path: tree: writeTreeAtPath =
path: tree:
'' ''
mkdir -p "$out/"${esc path} mkdir -p "$out/"${esc path}
'' ''
+ pipe tree [ + pipe tree [
(mapAttrsToList (k: v: (mapAttrsToList (
k: v:
if isReferencablePath v then if isReferencablePath v then
"cp -R --reflink=auto ${esc "${v}"} \"$out/\"${esc path}/${esc k}" "cp -R --reflink=auto ${esc "${v}"} \"$out/\"${esc path}/${esc k}"
else if lib.isAttrs v then else if lib.isAttrs v then
writeTreeAtPath (path + "/" + k) v writeTreeAtPath (path + "/" + k) v
else else
throw "invalid type (expected path, derivation, string with context, or attrs)")) throw "invalid type (expected path, derivation, string with context, or attrs)"
))
concatLines concatLines
]; ];
/* Create a directory tree specified by a Nix attribute set structure. /*
Create a directory tree specified by a Nix attribute set structure.
Each value in `tree` should either be a file, a directory, or another tree Each value in `tree` should either be a file, a directory, or another tree
attribute set. Those paths will be written to a directory tree attribute set. Those paths will be written to a directory tree
corresponding to the structure of the attribute set. corresponding to the structure of the attribute set.
Type: string -> attrSet -> derivation Type: string -> attrSet -> derivation
*/ */
writeTree = name: tree: writeTree = name: tree: pkgs.runCommandLocal name { } (writeTreeAtPath "" tree);
pkgs.runCommandLocal name { } (writeTreeAtPath "" tree);
in in
# __functor trick so readTree can add the tests attribute # __functor trick so readTree can add the tests attribute
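A sketch of `writeTree` usage (the `depot.nix.writeTree` entry point, file names, and contents are illustrative):

    depot.nix.writeTree "example" {
      "README.md" = builtins.toFile "README.md" "hello";
      docs."notes.txt" = builtins.toFile "notes.txt" "world";
    }
    # => a derivation containing README.md and docs/notes.txt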


@ -1,93 +1,102 @@
{ depot, pkgs, lib, ... }: {
depot,
pkgs,
lib,
...
}:
let let
inherit (pkgs) runCommand writeText writeTextFile; inherit (pkgs) runCommand writeText writeTextFile;
inherit (depot.nix) writeTree; inherit (depot.nix) writeTree;
checkTree = name: tree: expected: checkTree =
name: tree: expected:
runCommand "writeTree-test-${name}" runCommand "writeTree-test-${name}"
{ {
nativeBuildInputs = [ pkgs.buildPackages.lr ]; nativeBuildInputs = [ pkgs.buildPackages.lr ];
passAsFile = [ "expected" ]; passAsFile = [ "expected" ];
inherit expected; inherit expected;
} '' }
actualPath="$NIX_BUILD_TOP/actual" ''
cd ${lib.escapeShellArg (writeTree name tree)} actualPath="$NIX_BUILD_TOP/actual"
lr . > "$actualPath" cd ${lib.escapeShellArg (writeTree name tree)}
diff -u "$expectedPath" "$actualPath" | tee "$out" lr . > "$actualPath"
''; diff -u "$expectedPath" "$actualPath" | tee "$out"
'';
in in
depot.nix.readTree.drvTargets { depot.nix.readTree.drvTargets {
empty = checkTree "empty" { } empty = checkTree "empty" { } ''
'' .
. '';
'';
simple-paths = checkTree "simple" simple-paths =
{ checkTree "simple"
writeTree = { {
meta = { writeTree = {
"owners.txt" = ../OWNERS; meta = {
"owners.txt" = ../OWNERS;
};
"code.nix" = ../default.nix;
all-tests = ./.;
nested.dirs.eval-time = builtins.toFile "owothia" ''
hold me owo
'';
}; };
"code.nix" = ../default.nix; }
all-tests = ./.; ''
nested.dirs.eval-time = builtins.toFile "owothia" '' .
hold me owo ./writeTree
''; ./writeTree/all-tests
}; ./writeTree/all-tests/default.nix
} ./writeTree/code.nix
'' ./writeTree/meta
. ./writeTree/meta/owners.txt
./writeTree ./writeTree/nested
./writeTree/all-tests ./writeTree/nested/dirs
./writeTree/all-tests/default.nix ./writeTree/nested/dirs/eval-time
./writeTree/code.nix
./writeTree/meta
./writeTree/meta/owners.txt
./writeTree/nested
./writeTree/nested/dirs
./writeTree/nested/dirs/eval-time
'';
empty-dirs = checkTree "empty-dirs"
{
this.dir.is.empty = { };
so.is.this.one = { };
}
''
.
./so
./so/is
./so/is/this
./so/is/this/one
./this
./this/dir
./this/dir/is
./this/dir/is/empty
'';
drvs = checkTree "drvs"
{
file-drv = writeText "road.txt" ''
Any road followed precisely to its end leads precisely nowhere.
''; '';
dir-drv = writeTextFile {
name = "dir-of-text"; empty-dirs =
destination = "/text/in/more/dirs.txt"; checkTree "empty-dirs"
text = '' {
Climb the mountain just a little bit to test that its a mountain. this.dir.is.empty = { };
From the top of the mountain, you cannot see the mountain. so.is.this.one = { };
}
''
.
./so
./so/is
./so/is/this
./so/is/this/one
./this
./this/dir
./this/dir/is
./this/dir/is/empty
'';
drvs =
checkTree "drvs"
{
file-drv = writeText "road.txt" ''
Any road followed precisely to its end leads precisely nowhere.
''; '';
}; dir-drv = writeTextFile {
} name = "dir-of-text";
'' destination = "/text/in/more/dirs.txt";
. text = ''
./dir-drv Climb the mountain just a little bit to test that it's a mountain.
./dir-drv/text From the top of the mountain, you cannot see the mountain.
./dir-drv/text/in '';
./dir-drv/text/in/more };
./dir-drv/text/in/more/dirs.txt }
./file-drv ''
''; .
./dir-drv
./dir-drv/text
./dir-drv/text/in
./dir-drv/text/in/more
./dir-drv/text/in/more/dirs.txt
./file-drv
'';
} }


@ -1,4 +1,9 @@
{ depot, lib, pkgs, ... }: {
depot,
lib,
pkgs,
...
}:
depot.nix.readTree.drvTargets rec { depot.nix.readTree.drvTargets rec {
terraform = pkgs.terraform.withPlugins (p: [ terraform = pkgs.terraform.withPlugins (p: [


@ -1,4 +1,5 @@
{ ... }: { { ... }:
{
node_exporter = ./json/node_exporter.json; node_exporter = ./json/node_exporter.json;
all = ./json; all = ./json;
} }


@ -1,4 +1,9 @@
{ depot, lib, pkgs, ... }: {
depot,
lib,
pkgs,
...
}:
depot.nix.readTree.drvTargets rec { depot.nix.readTree.drvTargets rec {
# Provide a Terraform wrapper with the right provider installed. # Provide a Terraform wrapper with the right provider installed.


@ -1,4 +1,9 @@
{ depot, pkgs, lib, ... }: {
depot,
pkgs,
lib,
...
}:
{ gerrit }: { gerrit }:
let let


@ -1,4 +1,9 @@
{ depot, lib, pkgs, ... }: {
depot,
lib,
pkgs,
...
}:
depot.nix.readTree.drvTargets rec { depot.nix.readTree.drvTargets rec {
terraform = pkgs.terraform.withPlugins (p: [ terraform = pkgs.terraform.withPlugins (p: [


@ -1,4 +1,9 @@
{ depot, lib, pkgs, ... }: {
depot,
lib,
pkgs,
...
}:
depot.nix.readTree.drvTargets rec { depot.nix.readTree.drvTargets rec {
terraform = pkgs.terraform.withPlugins (p: [ terraform = pkgs.terraform.withPlugins (p: [


@ -1,4 +1,9 @@
{ depot, lib, pkgs, ... }: {
depot,
lib,
pkgs,
...
}:
depot.nix.readTree.drvTargets rec { depot.nix.readTree.drvTargets rec {
# Provide a Terraform wrapper with the right provider installed. # Provide a Terraform wrapper with the right provider installed.


@ -21,10 +21,11 @@ in
path = [ depot.contrib.archivist.parse-bucket-logs ]; path = [ depot.contrib.archivist.parse-bucket-logs ];
serviceConfig = { serviceConfig = {
Type = "oneshot"; Type = "oneshot";
ExecStart = (pkgs.writers.writePython3 "parse-bucket-logs-continuously" ExecStart = (
{ pkgs.writers.writePython3 "parse-bucket-logs-continuously" {
libraries = [ pkgs.python3Packages.boto3 ]; libraries = [ pkgs.python3Packages.boto3 ];
} ./parse-bucket-logs-continuously.py); } ./parse-bucket-logs-continuously.py
);
DynamicUser = "yes"; DynamicUser = "yes";
StateDirectory = "parse-bucket-logs"; StateDirectory = "parse-bucket-logs";
}; };
@ -38,4 +39,3 @@ in
system.stateVersion = "23.05"; # Did you read the comment? system.stateVersion = "23.05"; # Did you read the comment?
} }


@ -1,27 +1,30 @@
{ lib, modulesPath, ... }: { lib, modulesPath, ... }:
{ {
imports = imports = [
[ (modulesPath + "/profiles/qemu-guest.nix")
(modulesPath + "/profiles/qemu-guest.nix") ];
];
boot.initrd.availableKernelModules = [ "ahci" "xhci_pci" "virtio_pci" "sr_mod" "virtio_blk" ]; boot.initrd.availableKernelModules = [
"ahci"
"xhci_pci"
"virtio_pci"
"sr_mod"
"virtio_blk"
];
boot.initrd.kernelModules = [ ]; boot.initrd.kernelModules = [ ];
boot.kernelModules = [ "kvm-amd" ]; boot.kernelModules = [ "kvm-amd" ];
boot.extraModulePackages = [ ]; boot.extraModulePackages = [ ];
fileSystems."/" = fileSystems."/" = {
{ device = "/dev/disk/by-partlabel/root";
device = "/dev/disk/by-partlabel/root"; fsType = "xfs";
fsType = "xfs"; };
};
fileSystems."/boot" = fileSystems."/boot" = {
{ device = "/dev/disk/by-partlabel/boot";
device = "/dev/disk/by-partlabel/boot"; fsType = "vfat";
fsType = "vfat"; };
};
swapDevices = [ ]; swapDevices = [ ];


@ -1,4 +1,9 @@
{ depot, lib, pkgs, ... }: # readTree options {
depot,
lib,
pkgs,
...
}: # readTree options
{ config, ... }: # passed by module system { config, ... }: # passed by module system
let let
mod = name: depot.path.origSrc + ("/ops/modules/" + name); mod = name: depot.path.origSrc + ("/ops/modules/" + name);
@ -24,7 +29,11 @@ in
boot.specialFileSystems = lib.mkForce { boot.specialFileSystems = lib.mkForce {
"/run/wrappers" = { "/run/wrappers" = {
fsType = "tmpfs"; fsType = "tmpfs";
options = [ "nodev" "mode=755" "size=${config.security.wrapperDirSize}" ]; options = [
"nodev"
"mode=755"
"size=${config.security.wrapperDirSize}"
];
}; };
}; };
@ -56,10 +65,17 @@ in
interfaces.host0.ipv6 = { interfaces.host0.ipv6 = {
addresses = [ addresses = [
{ address = "2001:bc8:38ee:100:7000::20"; prefixLength = 64; } {
address = "2001:bc8:38ee:100:7000::20";
prefixLength = 64;
}
]; ];
routes = [ routes = [
{ address = "64:ff9b::"; via = "2001:bc8:38ee:100::100"; prefixLength = 96; } {
address = "64:ff9b::";
via = "2001:bc8:38ee:100::100";
prefixLength = 96;
}
]; ];
}; };
@ -112,7 +128,8 @@ in
]; ];
time.timeZone = "UTC"; time.timeZone = "UTC";
users.users.root.openssh.authorizedKeys.keys = depot.ops.users.edef ++ depot.ops.users.flokli ++ depot.ops.users.raito; users.users.root.openssh.authorizedKeys.keys =
depot.ops.users.edef ++ depot.ops.users.flokli ++ depot.ops.users.raito;
users.groups.kvm = { }; users.groups.kvm = { };
users.users.root.extraGroups = [ "kvm" ]; users.users.root.extraGroups = [ "kvm" ];


@ -1,4 +1,9 @@
{ depot, lib, pkgs, ... }: # readTree options {
depot,
lib,
pkgs,
...
}: # readTree options
{ config, ... }: # passed by module system { config, ... }: # passed by module system
let let
@ -111,7 +116,8 @@ in
createHome = true; createHome = true;
home = "/var/lib/git"; home = "/var/lib/git";
}; };
users.root.openssh.authorizedKeys.keys = depot.ops.users.edef ++ depot.ops.users.flokli ++ depot.ops.users.raito; users.root.openssh.authorizedKeys.keys =
depot.ops.users.edef ++ depot.ops.users.flokli ++ depot.ops.users.raito;
}; };
boot.initrd.systemd.enable = true; boot.initrd.systemd.enable = true;


@ -1,4 +1,9 @@
{ depot, lib, pkgs, ... }: # readTree options {
depot,
lib,
pkgs,
...
}: # readTree options
{ config, ... }: # passed by module system { config, ... }: # passed by module system
let let
@ -130,7 +135,8 @@ in
# Required for prometheus to be able to scrape stats # Required for prometheus to be able to scrape stats
services.nginx.statusPage = true; services.nginx.statusPage = true;
users.users.root.openssh.authorizedKeys.keys = depot.ops.users.edef ++ depot.ops.users.flokli ++ depot.ops.users.raito; users.users.root.openssh.authorizedKeys.keys =
depot.ops.users.edef ++ depot.ops.users.flokli ++ depot.ops.users.raito;
boot.initrd.systemd.enable = true; boot.initrd.systemd.enable = true;
zramSwap.enable = true; zramSwap.enable = true;


@ -1,4 +1,9 @@
{ depot, lib, pkgs, ... }: # readTree options {
depot,
lib,
pkgs,
...
}: # readTree options
{ config, ... }: # passed by module system { config, ... }: # passed by module system
let let
@ -158,7 +163,8 @@ in
# Required for prometheus to be able to scrape stats # Required for prometheus to be able to scrape stats
services.nginx.statusPage = true; services.nginx.statusPage = true;
users.users.root.openssh.authorizedKeys.keys = depot.ops.users.edef ++ depot.ops.users.flokli ++ depot.ops.users.raito; users.users.root.openssh.authorizedKeys.keys =
depot.ops.users.edef ++ depot.ops.users.flokli ++ depot.ops.users.raito;
boot.initrd.systemd.enable = true; boot.initrd.systemd.enable = true;
zramSwap.enable = true; zramSwap.enable = true;


@ -1,18 +1,24 @@
{ depot, pkgs, lib, ... }: # readTree options {
depot,
pkgs,
lib,
...
}: # readTree options
{ config, ... }: # passed by module system { config, ... }: # passed by module system
let let
srvos = srvos = import (
import (builtins.fetchTarball { builtins.fetchTarball {
url = "https://github.com/nix-community/srvos/archive/8e7d3c690975ee6790926bdfd1258016c967d163.tar.gz"; url = "https://github.com/nix-community/srvos/archive/8e7d3c690975ee6790926bdfd1258016c967d163.tar.gz";
sha256 = "sha256-l7epHqAcg8Qktu8vO2ZfjSH1wcai01XQOKQA9ADHIk4="; sha256 = "sha256-l7epHqAcg8Qktu8vO2ZfjSH1wcai01XQOKQA9ADHIk4=";
}); }
disko = );
(builtins.fetchTarball { disko = (
builtins.fetchTarball {
url = "https://github.com/nix-community/disko/archive/84dd8eea9a06006d42b8af7cfd4fda4cf334db81.tar.gz"; url = "https://github.com/nix-community/disko/archive/84dd8eea9a06006d42b8af7cfd4fda4cf334db81.tar.gz";
sha256 = "13mfnjnjp21wms4mw35ar019775qgy3fnjc59zrpnqbkfmzyvv02"; sha256 = "13mfnjnjp21wms4mw35ar019775qgy3fnjc59zrpnqbkfmzyvv02";
}); }
);
in in
{ {
@ -38,7 +44,6 @@ in
forceSSL = true; forceSSL = true;
}; };
security.acme.acceptTerms = true; security.acme.acceptTerms = true;
security.acme.defaults.email = "admin+acme@numtide.com"; security.acme.defaults.email = "admin+acme@numtide.com";
@ -48,11 +53,9 @@ in
systemd.network.networks."10-uplink".networkConfig.Address = "2a01:4f9:3071:1091::2/64"; systemd.network.networks."10-uplink".networkConfig.Address = "2a01:4f9:3071:1091::2/64";
# Enable SSH and add some keys # Enable SSH and add some keys
services.openssh.enable = true; services.openssh.enable = true;
users.users.root.openssh.authorizedKeys.keys = users.users.root.openssh.authorizedKeys.keys =
depot.ops.users.edef depot.ops.users.edef
++ depot.ops.users.flokli ++ depot.ops.users.flokli


@ -174,7 +174,8 @@ in
}; };
}; };
systemd.services.grafana.serviceConfig.LoadCredential = "github_auth_client_secret:/etc/secrets/grafana_github_auth_client_secret"; systemd.services.grafana.serviceConfig.LoadCredential =
"github_auth_client_secret:/etc/secrets/grafana_github_auth_client_secret";
systemd.services.grafana.serviceConfig.RuntimeDirectory = "grafana"; systemd.services.grafana.serviceConfig.RuntimeDirectory = "grafana";
systemd.services.grafana.serviceConfig.SupplementaryGroups = "nginx"; systemd.services.grafana.serviceConfig.SupplementaryGroups = "nginx";


@ -1,9 +1,10 @@
{ config {
, lib config,
, utils lib,
, pkgs utils,
, depot pkgs,
, ... depot,
...
}: }:
let let
cfg = config.services.nar-bridge; cfg = config.services.nar-bridge;


@ -9,13 +9,15 @@
virtualHosts.${config.machine.domain} = { virtualHosts.${config.machine.domain} = {
locations."=/" = { locations."=/" = {
tryFiles = "$uri $uri/index.html =404"; tryFiles = "$uri $uri/index.html =404";
root = pkgs.runCommand "index" root =
{ pkgs.runCommand "index"
nativeBuildInputs = [ pkgs.markdown2html-converter ]; {
} '' nativeBuildInputs = [ pkgs.markdown2html-converter ];
mkdir -p $out }
markdown2html-converter ${./README.md} -o $out/index.html ''
''; mkdir -p $out
markdown2html-converter ${./README.md} -o $out/index.html
'';
}; };
locations."/" = { locations."/" = {
proxyPass = "http://unix:/run/nar-bridge.sock:/"; proxyPass = "http://unix:/run/nar-bridge.sock:/";


@ -3,10 +3,21 @@
# Thanks to all the Lix core developers for this! # Thanks to all the Lix core developers for this!
# vim: et:ts=2:sw=2: # vim: et:ts=2:sw=2:
# #
{ depot, pkgs, lib, config, ... }: {
depot,
pkgs,
lib,
config,
...
}:
let let
cfg = config.services.depot.forgejo; cfg = config.services.depot.forgejo;
inherit (lib) types mkEnableOption mkOption mkIf; inherit (lib)
types
mkEnableOption
mkOption
mkIf
;
emojo = emojo =
let let
drgn = pkgs.fetchzip { drgn = pkgs.fetchzip {
@ -39,7 +50,15 @@ let
''; '';
}; };
in in
pkgs.symlinkJoin { name = "emojo"; paths = [ drgn neocat neofox dragn ]; }; pkgs.symlinkJoin {
name = "emojo";
paths = [
drgn
neocat
neofox
dragn
];
};
in in
{ {
options.services.depot.forgejo = { options.services.depot.forgejo = {
@ -77,13 +96,15 @@ in
enable = true; enable = true;
package = pkgs.forgejo.overrideAttrs (old: { package = pkgs.forgejo.overrideAttrs (old: {
patches = old.patches ++ (with depot.third_party.lix_forgejo.patches; [ patches =
upstream_link old.patches
signin_redirect ++ (with depot.third_party.lix_forgejo.patches; [
api_dont_notify upstream_link
forgejo_is_now_gerrit_native signin_redirect
forgejo_knows_about_gerrit api_dont_notify
]); forgejo_is_now_gerrit_native
forgejo_knows_about_gerrit
]);
}); });
# General settings. # General settings.
@ -294,10 +315,14 @@ in
services.mysql.enable = lib.mkForce true; services.mysql.enable = lib.mkForce true;
services.mysql.package = lib.mkForce pkgs.mariadb; services.mysql.package = lib.mkForce pkgs.mariadb;
systemd.tmpfiles.rules = let cfg = config.services.forgejo; in [ systemd.tmpfiles.rules =
"d '${cfg.customDir}/public/assets' 0750 ${cfg.user} ${cfg.group} - -" let
"d '${cfg.customDir}/public/assets/img' 0750 ${cfg.user} ${cfg.group} - -" cfg = config.services.forgejo;
"L+ '${cfg.customDir}/public/assets/img/emoji' - - - - ${emojo}" in
]; [
"d '${cfg.customDir}/public/assets' 0750 ${cfg.user} ${cfg.group} - -"
"d '${cfg.customDir}/public/assets/img' 0750 ${cfg.user} ${cfg.group} - -"
"L+ '${cfg.customDir}/public/assets/img/emoji' - - - - ${emojo}"
];
}; };
} }
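For readers unfamiliar with the emojo construction above: pkgs.symlinkJoin builds a single store path whose tree consists of symlinks into each of the given paths, which is how the four emoji packs (drgn, neocat, neofox, dragn) end up served as one directory. A hedged sketch with stand-in inputs (assumes a nixpkgs on NIX_PATH; hello and figlet merely stand in for the fetchzip derivations):

  let
    pkgs = import <nixpkgs> { };
  in
  pkgs.symlinkJoin {
    name = "emojo-sketch";
    # stand-ins for the drgn/neocat/neofox/dragn fetchzip derivations:
    paths = [
      pkgs.hello
      pkgs.figlet
    ];
  }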


@ -1,24 +1,34 @@
# Configuration for the Gerrit autosubmit bot (//ops/gerrit-autosubmit) # Configuration for the Gerrit autosubmit bot (//ops/gerrit-autosubmit)
{ depot, pkgs, config, lib, ... }: {
depot,
pkgs,
config,
lib,
...
}:
let let
cfg = config.services.depot.gerrit-autosubmit; cfg = config.services.depot.gerrit-autosubmit;
description = "gerrit-autosubmit - autosubmit bot for Gerrit"; description = "gerrit-autosubmit - autosubmit bot for Gerrit";
mkStringOption = default: lib.mkOption { mkStringOption =
inherit default; default:
type = lib.types.str; lib.mkOption {
}; inherit default;
type = lib.types.str;
};
in in
{ {
options.services.depot.gerrit-autosubmit = { options.services.depot.gerrit-autosubmit = {
enable = lib.mkEnableOption description; enable = lib.mkEnableOption description;
gerritUrl = mkStringOption "https://cl.snix.dev"; gerritUrl = mkStringOption "https://cl.snix.dev";
secretsFile = with lib; mkOption { secretsFile =
description = "Path to a systemd EnvironmentFile containing secrets"; with lib;
default = config.age.secretsDir + "/gerrit-autosubmit"; mkOption {
type = types.str; description = "Path to a systemd EnvironmentFile containing secrets";
}; default = config.age.secretsDir + "/gerrit-autosubmit";
type = types.str;
};
}; };
config = lib.mkIf cfg.enable { config = lib.mkIf cfg.enable {
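The mkStringOption helper above (a twin lives in the restic module further down) is shorthand over lib.mkOption, and nixfmt now formats such curried helpers with the parameter on its own line. A small evaluable sketch of what a call expands to (assumes a nixpkgs on NIX_PATH):

  let
    lib = (import <nixpkgs> { }).lib;
    mkStringOption =
      default:
      lib.mkOption {
        inherit default;
        type = lib.types.str;
      };
  in
  (mkStringOption "https://cl.snix.dev").default
  # => "https://cl.snix.dev"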


@ -1,4 +1,9 @@
{ config, depot, lib, ... }: {
config,
depot,
lib,
...
}:
let let
cfg = config.services.depot.gerrit-webhook-to-irccat; cfg = config.services.depot.gerrit-webhook-to-irccat;
@ -21,8 +26,9 @@ in
config = lib.mkIf cfg.enable { config = lib.mkIf cfg.enable {
systemd.services.gerrit-webhook-to-irccat = { systemd.services.gerrit-webhook-to-irccat = {
serviceConfig = { serviceConfig = {
ExecStart = "${depot.ops.gerrit-webhook-to-irccat}/bin/gerrit-webhook-to-irccat" + ExecStart =
" -irccat-url ${cfg.irccatUrl}"; "${depot.ops.gerrit-webhook-to-irccat}/bin/gerrit-webhook-to-irccat"
+ " -irccat-url ${cfg.irccatUrl}";
Restart = "always"; Restart = "always";
RestartSec = 5; RestartSec = 5;
User = "gerrit-webhook-to-irccat"; User = "gerrit-webhook-to-irccat";


@ -1,11 +1,21 @@
{ config, lib, pkgs, modulesPath, ... }: {
config,
lib,
pkgs,
modulesPath,
...
}:
let let
cfg = config.infra.hardware.hetzner-cloud; cfg = config.infra.hardware.hetzner-cloud;
inherit (lib) types mkOption mkEnableOption mkIf; inherit (lib)
types
mkOption
mkEnableOption
mkIf
;
in in
{ {
imports = imports = [ (modulesPath + "/profiles/qemu-guest.nix") ];
[ (modulesPath + "/profiles/qemu-guest.nix") ];
options.infra.hardware.hetzner-cloud = { options.infra.hardware.hetzner-cloud = {
enable = mkEnableOption "the Hetzner Cloud hardware profile"; enable = mkEnableOption "the Hetzner Cloud hardware profile";
@ -40,7 +50,10 @@ in
} }
]; ];
dns = [ "2a01:4ff:ff00::add:1" "2a01:4ff:ff00::add:2" ]; dns = [
"2a01:4ff:ff00::add:1"
"2a01:4ff:ff00::add:2"
];
}; };
boot.loader.systemd-boot.enable = true; boot.loader.systemd-boot.enable = true;
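Another recurring nixfmt change is visible here: a long inherit (lib) ...; gains one name per line, with the closing semicolon on its own line. It remains plain sugar for repeated attribute selection, as this tiny evaluable sketch shows (the small attrset stands in for lib):

  let
    lib = {
      mkOption = opt: opt // { _type = "option"; };
      types = { int = "int"; };
    };
    inherit (lib)
      types
      mkOption
      ;
  in
  mkOption {
    type = types.int;
    default = 5;
  }
  # => { _type = "option"; default = 5; type = "int"; }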


@ -1,4 +1,9 @@
{ config, lib, pkgs, ... }: {
config,
lib,
pkgs,
...
}:
let let
cfg = config.services.irccat; cfg = config.services.irccat;
@ -35,16 +40,18 @@ in
wants = [ "network.target" ]; wants = [ "network.target" ];
serviceConfig = { serviceConfig = {
ExecStartPre = (pkgs.writeShellScript "merge-irccat-config" '' ExecStartPre = (
if [ ! -f "$CREDENTIALS_DIRECTORY/secrets" ]; then pkgs.writeShellScript "merge-irccat-config" ''
echo "irccat secrets file is missing" if [ ! -f "$CREDENTIALS_DIRECTORY/secrets" ]; then
exit 1 echo "irccat secrets file is missing"
fi exit 1
fi
# jq's * is the recursive merge operator # jq's * is the recursive merge operator
${pkgs.jq}/bin/jq -s '.[0] * .[1]' ${configJson} "$CREDENTIALS_DIRECTORY/secrets" \ ${pkgs.jq}/bin/jq -s '.[0] * .[1]' ${configJson} "$CREDENTIALS_DIRECTORY/secrets" \
> /var/lib/irccat/irccat.json > /var/lib/irccat/irccat.json
''); ''
);
ExecStart = "${pkgs.irccat}/bin/irccat"; ExecStart = "${pkgs.irccat}/bin/irccat";
DynamicUser = true; DynamicUser = true;
@ -57,4 +64,3 @@ in
}; };
}; };
} }
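The jq -s '.[0] * .[1]' pre-start step above deep-merges the static configJson with the runtime secrets file; on objects, jq's * behaves like lib.recursiveUpdate does on Nix attrsets. An evaluable Nix analogue of the merge (the field names are invented for the sketch):

  let
    lib = (import <nixpkgs> { }).lib;
    base = { irc = { server = "irc.example.org"; nick = "ircbot"; }; };
    secrets = { irc = { password = "hunter2"; }; };
  in
  lib.recursiveUpdate base secrets
  # => { irc = { nick = "ircbot"; password = "hunter2"; server = "irc.example.org"; }; }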


@ -4,12 +4,18 @@
{ {
programs.ssh.knownHosts = { programs.ssh.knownHosts = {
public01 = { public01 = {
hostNames = [ "public01.infra.snix.dev" "git.snix.dev" ]; hostNames = [
"public01.infra.snix.dev"
"git.snix.dev"
];
publicKey = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAICzB7bqXWcv+sVokySvj1d74zRlVLSNqBw7/OY3c7QYd"; publicKey = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAICzB7bqXWcv+sVokySvj1d74zRlVLSNqBw7/OY3c7QYd";
}; };
gerrit01 = { gerrit01 = {
hostNames = [ "gerrit01.infra.snix.dev" "cl.snix.dev" ]; hostNames = [
"gerrit01.infra.snix.dev"
"cl.snix.dev"
];
publicKey = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIN+RCLAExaM5EC70UsCPMtDT1Cfa80Ux/vex95fLk9S4"; publicKey = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIN+RCLAExaM5EC70UsCPMtDT1Cfa80Ux/vex95fLk9S4";
}; };


@ -1,5 +1,11 @@
# Gerrit configuration for the snix monorepo # Gerrit configuration for the snix monorepo
{ depot, pkgs, config, lib, ... }: {
depot,
pkgs,
config,
lib,
...
}:
let let
cfg = config.services.gerrit; cfg = config.services.gerrit;
@ -7,10 +13,12 @@ let
gerritPackage = depot.third_party.nix-gerrit.gerrit_3_12; gerritPackage = depot.third_party.nix-gerrit.gerrit_3_12;
gerritPlugins = depot.third_party.nix-gerrit.plugins_3_12; gerritPlugins = depot.third_party.nix-gerrit.plugins_3_12;
besadiiWithConfig = name: pkgs.writeShellScript "besadii-gerrit01" '' besadiiWithConfig =
export BESADII_CONFIG=/run/agenix/gerrit-besadii-config name:
exec -a ${name} ${depot.ops.besadii}/bin/besadii "$@" pkgs.writeShellScript "besadii-gerrit01" ''
''; export BESADII_CONFIG=/run/agenix/gerrit-besadii-config
exec -a ${name} ${depot.ops.besadii}/bin/besadii "$@"
'';
gerritHooks = pkgs.runCommand "gerrit-hooks" { } '' gerritHooks = pkgs.runCommand "gerrit-hooks" { } ''
mkdir -p $out mkdir -p $out


@ -1,4 +1,5 @@
{ config, depot, ... }: { { config, depot, ... }:
{
imports = [ imports = [
depot.third_party.alertmanager-irc-relay.module depot.third_party.alertmanager-irc-relay.module
]; ];
@ -10,7 +11,10 @@
irc_port = 6697; irc_port = 6697;
irc_nickname = "silentfox"; irc_nickname = "silentfox";
irc_channels = [ irc_channels = [
{ name = "#snix"; password = "$CHANNEL_PASSWORD"; } {
name = "#snix";
password = "$CHANNEL_PASSWORD";
}
]; ];
}; };
environmentFiles = [ environmentFiles = [


@ -1,15 +1,25 @@
{ depot {
, config depot,
, lib config,
, ... lib,
...
}: }:
let let
cfg = config.infra.monitoring.alloy; cfg = config.infra.monitoring.alloy;
inherit (lib) mkEnableOption mkOption mkIf types mapAttrs' nameValuePair; inherit (lib)
mkEnableOption
mkOption
mkIf
types
mapAttrs'
nameValuePair
;
in in
{ {
options.infra.monitoring.alloy = { options.infra.monitoring.alloy = {
enable = (mkEnableOption "Grafana Alloy") // { default = true; }; enable = (mkEnableOption "Grafana Alloy") // {
default = true;
};
exporters = mkOption { exporters = mkOption {
description = '' description = ''
@ -19,12 +29,17 @@ in
internally, which ends up exported as `job` label internally, which ends up exported as `job` label
on all metrics of that exporter. on all metrics of that exporter.
''; '';
type = types.attrsOf (types.submodule ({ config, name, ... }: { type = types.attrsOf (
options.port = mkOption { types.submodule (
description = "Exporter port"; { config, name, ... }:
type = types.int; {
}; options.port = mkOption {
})); description = "Exporter port";
type = types.int;
};
}
)
);
default = { }; default = { };
}; };
}; };
@ -70,8 +85,10 @@ in
} }
} }
''; '';
} // (mapAttrs' }
(name: v: nameValuePair "alloy/scrape_${name}.alloy" { // (mapAttrs' (
name: v:
nameValuePair "alloy/scrape_${name}.alloy" {
text = '' text = ''
prometheus.scrape "${name}" { prometheus.scrape "${name}" {
targets = [ targets = [
@ -80,8 +97,8 @@ in
forward_to = [prometheus.remote_write.mimir.receiver] forward_to = [prometheus.remote_write.mimir.receiver]
} }
''; '';
}) }
cfg.exporters); ) cfg.exporters);
systemd.services.alloy.serviceConfig = { systemd.services.alloy.serviceConfig = {
LoadCredential = [ LoadCredential = [
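The trailing mapAttrs' call above is what fans the exporters option out into one environment.etc entry per scrape config, with nameValuePair renaming the keys along the way. A reduced, evaluable model of that transformation (exporter names and ports are made up):

  let
    lib = (import <nixpkgs> { }).lib;
    inherit (lib) mapAttrs' nameValuePair;
    exporters = {
      node = { port = 9100; };
      postgres = { port = 9187; };
    };
  in
  mapAttrs' (
    name: v: nameValuePair "alloy/scrape_${name}.alloy" "scrape :${toString v.port}"
  ) exporters
  # => { "alloy/scrape_node.alloy" = "scrape :9100"; "alloy/scrape_postgres.alloy" = "scrape :9187"; }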


@ -1,7 +1,8 @@
{ depot {
, config depot,
, lib config,
, ... lib,
...
}: }:
let let
cfg = config.services.depot.grafana; cfg = config.services.depot.grafana;


@ -1,6 +1,7 @@
{ config {
, lib config,
, ... lib,
...
}: }:
let let
cfg = config.services.depot.loki; cfg = config.services.depot.loki;
@ -38,7 +39,10 @@ in
ring = { ring = {
kvstore.store = "memberlist"; kvstore.store = "memberlist";
# TODO: Such an ugly hack. # TODO: Such an ugly hack.
instance_interface_names = [ "enp1s0" "lo" ]; instance_interface_names = [
"enp1s0"
"lo"
];
}; };
replication_factor = 1; replication_factor = 1;
}; };


@ -1,7 +1,8 @@
{ config {
, lib config,
, pkgs lib,
, ... pkgs,
...
}: }:
let let
cfg = config.services.depot.prometheus; cfg = config.services.depot.prometheus;
@ -9,15 +10,17 @@ let
mimirPort = config.services.mimir.configuration.server.http_listen_port; mimirPort = config.services.mimir.configuration.server.http_listen_port;
alerts = pkgs.runCommand "mimir-alerts-checked" alerts =
{ pkgs.runCommand "mimir-alerts-checked"
src = ./alerts; {
nativeBuildInputs = with pkgs; [ prometheus.cli ]; src = ./alerts;
} '' nativeBuildInputs = with pkgs; [ prometheus.cli ];
promtool check rules $src/* }
mkdir $out ''
cp -R $src $out/anonymous/ promtool check rules $src/*
''; mkdir $out
cp -R $src $out/anonymous/
'';
in in
{ {
options.services.depot.prometheus.enable = mkEnableOption "Prometheus scraper"; options.services.depot.prometheus.enable = mkEnableOption "Prometheus scraper";
@ -42,13 +45,34 @@ in
}; };
# TODO: Such an ugly hack. # TODO: Such an ugly hack.
distributor.ring.instance_interface_names = [ "enp1s0" "lo" ]; distributor.ring.instance_interface_names = [
ingester.ring.instance_interface_names = [ "enp1s0" "lo" ]; "enp1s0"
frontend.instance_interface_names = [ "enp1s0" "lo" ]; "lo"
query_scheduler.ring.instance_interface_names = [ "enp1s0" "lo" ]; ];
ruler.ring.instance_interface_names = [ "enp1s0" "lo" ]; ingester.ring.instance_interface_names = [
compactor.sharding_ring.instance_interface_names = [ "enp1s0" "lo" ]; "enp1s0"
store_gateway.sharding_ring.instance_interface_names = [ "enp1s0" "lo" ]; "lo"
];
frontend.instance_interface_names = [
"enp1s0"
"lo"
];
query_scheduler.ring.instance_interface_names = [
"enp1s0"
"lo"
];
ruler.ring.instance_interface_names = [
"enp1s0"
"lo"
];
compactor.sharding_ring.instance_interface_names = [
"enp1s0"
"lo"
];
store_gateway.sharding_ring.instance_interface_names = [
"enp1s0"
"lo"
];
memberlist = { memberlist = {
advertise_addr = "127.0.0.1"; advertise_addr = "127.0.0.1";
@ -91,11 +115,13 @@ in
receivers = [ receivers = [
{ {
name = "irc"; name = "irc";
webhook_configs = [{ webhook_configs = [
# Mimir can't expand environment variables in external config files, {
# so work around it. # Mimir can't expand environment variables in external config files,
url_file = "/run/credentials/mimir.service/webhook-url"; # so work around it.
}]; url_file = "/run/credentials/mimir.service/webhook-url";
}
];
} }
]; ];
}; };
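The seven copies of instance_interface_names above (already flagged as an ugly hack in the TODOs) each expand from a two-element list that nixfmt now prints one element per line. Should the repetition itself ever bother anyone, the list could be bound once; a sketch, not a proposal for this change:

  let
    ifaces = [
      "enp1s0"
      "lo"
    ];
  in
  {
    distributor.ring.instance_interface_names = ifaces;
    ingester.ring.instance_interface_names = ifaces;
    frontend.instance_interface_names = ifaces;
    # ...and likewise for the query_scheduler/ruler/compactor/store_gateway rings.
  }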


@ -1,6 +1,7 @@
{ config {
, lib config,
, ... lib,
...
}: }:
let let
cfg = config.services.depot.tempo; cfg = config.services.depot.tempo;
@ -48,11 +49,14 @@ in
}; };
}; };
systemd.services.tempo.serviceConfig.EnvironmentFile = [ config.age.secrets.tempo-environment.path ]; systemd.services.tempo.serviceConfig.EnvironmentFile = [
config.age.secrets.tempo-environment.path
];
services.nginx = { services.nginx = {
upstreams.tempo = { upstreams.tempo = {
servers."${config.services.tempo.settings.distributor.receivers.otlp.protocols.http.endpoint}" = { }; servers."${config.services.tempo.settings.distributor.receivers.otlp.protocols.http.endpoint}" =
{ };
extraConfig = "keepalive 16;"; extraConfig = "keepalive 16;";
}; };


@ -5,14 +5,21 @@
# - restic's cache lives in /var/backup/restic/cache # - restic's cache lives in /var/backup/restic/cache
# - repository password lives in `config.age.secrets.restic-repository-password.path` # - repository password lives in `config.age.secrets.restic-repository-password.path`
# - object storage credentials in `config.age.secrets.restic-bucket-credentials.path` # - object storage credentials in `config.age.secrets.restic-bucket-credentials.path`
{ config, lib, pkgs, ... }: {
config,
lib,
pkgs,
...
}:
let let
cfg = config.services.depot.restic; cfg = config.services.depot.restic;
mkStringOption = default: lib.mkOption { mkStringOption =
inherit default; default:
type = lib.types.str; lib.mkOption {
}; inherit default;
type = lib.types.str;
};
in in
{ {
options.services.depot.restic = { options.services.depot.restic = {
@ -23,16 +30,20 @@ in
repository = mkStringOption config.networking.hostName; repository = mkStringOption config.networking.hostName;
interval = mkStringOption "hourly"; interval = mkStringOption "hourly";
paths = with lib; mkOption { paths =
description = "Directories that should be backed up"; with lib;
type = types.listOf types.str; mkOption {
}; description = "Directories that should be backed up";
type = types.listOf types.str;
};
exclude = with lib; mkOption { exclude =
description = "Files that should be excluded from backups"; with lib;
type = types.listOf types.str; mkOption {
default = [ ]; description = "Files that should be excluded from backups";
}; type = types.listOf types.str;
default = [ ];
};
}; };
config = lib.mkIf cfg.enable { config = lib.mkIf cfg.enable {
@ -51,8 +62,7 @@ in
RESTIC_PASSWORD_FILE = config.age.secrets.restic-repository-password.path; RESTIC_PASSWORD_FILE = config.age.secrets.restic-repository-password.path;
RESTIC_CACHE_DIR = "/var/backup/restic/cache"; RESTIC_CACHE_DIR = "/var/backup/restic/cache";
RESTIC_EXCLUDE_FILE = RESTIC_EXCLUDE_FILE = builtins.toFile "exclude-files" (lib.concatStringsSep "\n" cfg.exclude);
builtins.toFile "exclude-files" (lib.concatStringsSep "\n" cfg.exclude);
}; };
}; };


@ -1,9 +1,10 @@
# Configuration for the snix buildkite agents. # Configuration for the snix buildkite agents.
{ config {
, depot config,
, pkgs depot,
, lib pkgs,
, ... lib,
...
}: }:
let let
@ -50,61 +51,56 @@ in
config = lib.mkIf cfg.enable { config = lib.mkIf cfg.enable {
# Run the Buildkite agents using the default upstream module. # Run the Buildkite agents using the default upstream module.
services.buildkite-agents = builtins.listToAttrs ( services.buildkite-agents = builtins.listToAttrs (
map map (n: rec {
(n: rec { name = "${hostname}-${toString n}";
name = "${hostname}-${toString n}"; value = {
value = inherit name;
{ enable = true;
inherit name; tokenPath = config.age.secretsDir + "/buildkite-agent-token";
enable = true; privateSshKeyPath = config.age.secretsDir + "/buildkite-private-key";
tokenPath = config.age.secretsDir + "/buildkite-agent-token"; hooks.post-command = "${buildkiteHooks}/bin/post-command";
privateSshKeyPath = config.age.secretsDir + "/buildkite-private-key"; tags.queue = "default";
hooks.post-command = "${buildkiteHooks}/bin/post-command"; hooks.environment = ''
tags.queue = "default"; export PATH=$PATH:/run/wrappers/bin
hooks.environment = '' '';
export PATH=$PATH:/run/wrappers/bin
'';
tags = { tags = {
# all agents support small jobs # all agents support small jobs
small = "true"; small = "true";
inherit hostname; inherit hostname;
large = if n <= cfg.largeSlots then "true" else "false"; large = if n <= cfg.largeSlots then "true" else "false";
}; };
runtimePackages = with pkgs; [ runtimePackages = with pkgs; [
bash bash
coreutils coreutils
credentialHelper credentialHelper
curl curl
git git
gnutar gnutar
gzip gzip
jq jq
nix nix
]; ];
}; };
}) }) agents
agents
); );
# Set up a group for all Buildkite agent users # Set up a group for all Buildkite agent users
users = { users = {
groups.buildkite-agents = { }; groups.buildkite-agents = { };
users = builtins.listToAttrs ( users = builtins.listToAttrs (
map map (n: rec {
(n: rec { name = "buildkite-agent-${hostname}-${toString n}";
name = "buildkite-agent-${hostname}-${toString n}"; value = {
value = { isSystemUser = true;
isSystemUser = true; group = lib.mkForce "buildkite-agents";
group = lib.mkForce "buildkite-agents"; extraGroups = [
extraGroups = [ name
name "docker"
"docker" ];
]; };
}; }) agents
})
agents
); );
}; };
}; };
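The reflowed block above keeps its original shape: builtins.listToAttrs over a map manufactures one buildkite agent, and one matching user, per slot index. A cut-down evaluable model, including the largeSlots comparison (hostname and counts invented):

  let
    hostname = "builder";
    largeSlots = 2;
    agents = [ 1 2 3 ];
  in
  builtins.listToAttrs (
    map (n: {
      name = "${hostname}-${toString n}";
      value = {
        large = if n <= largeSlots then "true" else "false";
      };
    }) agents
  )
  # => { builder-1 = { large = "true"; }; builder-2 = { large = "true"; }; builder-3 = { large = "false"; }; }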


@ -2,7 +2,10 @@
{ {
config = { config = {
networking.firewall.allowedTCPPorts = [ 80 443 ]; networking.firewall.allowedTCPPorts = [
80
443
];
security.acme = { security.acme = {
acceptTerms = true; acceptTerms = true;


@ -6,7 +6,8 @@
services.nginx = { services.nginx = {
upstreams.tempo = { upstreams.tempo = {
servers."${config.services.tempo.settings.distributor.receivers.otlp.protocols.http.endpoint}" = { }; servers."${config.services.tempo.settings.distributor.receivers.otlp.protocols.http.endpoint}" =
{ };
}; };
virtualHosts."tempo.snix.dev" = { virtualHosts."tempo.snix.dev" = {


@ -1,42 +1,56 @@
# Helper functions for instantiating depot-compatible NixOS machines. # Helper functions for instantiating depot-compatible NixOS machines.
{ depot, lib, pkgs, ... }@args: {
depot,
lib,
pkgs,
...
}@args:
let inherit (lib) findFirst; let
in rec { inherit (lib) findFirst;
in
rec {
# This provides our standard set of arguments to all NixOS modules. # This provides our standard set of arguments to all NixOS modules.
baseModule = { ... }: { baseModule =
nix.nixPath = { ... }:
let {
# Due to nixpkgsBisectPath, pkgs.path is not always in the nix store nix.nixPath =
nixpkgsStorePath = let
if lib.hasPrefix builtins.storeDir (toString pkgs.path) # Due to nixpkgsBisectPath, pkgs.path is not always in the nix store
then builtins.storePath pkgs.path # nixpkgs is already in the store nixpkgsStorePath =
else pkgs.path; # we need to dump nixpkgs to the store either way if lib.hasPrefix builtins.storeDir (toString pkgs.path) then
in builtins.storePath pkgs.path # nixpkgs is already in the store
[ else
("nixos=" + nixpkgsStorePath) pkgs.path; # we need to dump nixpkgs to the store either way
("nixpkgs=" + nixpkgsStorePath) in
]; [
}; ("nixos=" + nixpkgsStorePath)
("nixpkgs=" + nixpkgsStorePath)
nixosFor = configuration: (depot.third_party.nixos { ];
configuration = { ... }: {
imports = [
baseModule
configuration
];
}; };
specialArgs = { nixosFor =
inherit (args) depot; configuration:
}; (depot.third_party.nixos {
}); configuration =
{ ... }:
{
imports = [
baseModule
configuration
];
};
findSystem = hostname: specialArgs = {
(findFirst inherit (args) depot;
(system: system.config.networking.hostName == hostname) };
(throw "${hostname} is not a known NixOS host") });
(map nixosFor depot.ops.machines.all-systems));
findSystem =
hostname:
(findFirst (
system: system.config.networking.hostName == hostname
) (throw "${hostname} is not a known NixOS host") (map nixosFor depot.ops.machines.all-systems));
# Systems that should be built in CI # Systems that should be built in CI
archivistEC2System = nixosFor depot.ops.machines.archivist-ec2; archivistEC2System = nixosFor depot.ops.machines.archivist-ec2;


@ -1,6 +1,11 @@
# This file configures the primary build pipeline used for the # This file configures the primary build pipeline used for the
# top-level list of depot targets. # top-level list of depot targets.
{ depot, pkgs, externalArgs, ... }: {
depot,
pkgs,
externalArgs,
...
}:
let let
pipeline = depot.nix.buildkite.mkPipeline { pipeline = depot.nix.buildkite.mkPipeline {
@ -8,9 +13,10 @@ let
drvTargets = depot.ci.targets; drvTargets = depot.ci.targets;
parentTargetMap = parentTargetMap =
if (externalArgs ? parentTargetMap) if (externalArgs ? parentTargetMap) then
then builtins.fromJSON (builtins.readFile externalArgs.parentTargetMap) builtins.fromJSON (builtins.readFile externalArgs.parentTargetMap)
else { }; else
{ };
postBuildSteps = [ postBuildSteps = [
# After successful builds, create a gcroot for builds on canon. # After successful builds, create a gcroot for builds on canon.


@ -1,3 +1,5 @@
args: args:
let mkSecrets = import ./mkSecrets.nix args; in let
mkSecrets = import ./mkSecrets.nix args;
in
mkSecrets ./. (import ./secrets.nix) // { inherit mkSecrets; } mkSecrets ./. (import ./secrets.nix) // { inherit mkSecrets; }


@ -12,10 +12,12 @@ let
agePubkey = types.typedef "age pubkey" (s: isString s && hasPrefix "age" s); agePubkey = types.typedef "age pubkey" (s: isString s && hasPrefix "age" s);
agenixSecret = types.struct "agenixSecret" { agenixSecret = types.struct "agenixSecret" {
publicKeys = types.listOf (types.union [ publicKeys = types.listOf (
sshPubkey types.union [
agePubkey sshPubkey
]); agePubkey
]
);
}; };
in in


@ -20,13 +20,21 @@ let
superadmins = raito ++ edef ++ flokli; superadmins = raito ++ edef ++ flokli;
allDefault.publicKeys = superadmins ++ [ gerrit01 public01 build01 meta01 ]; allDefault.publicKeys = superadmins ++ [
gerrit01
public01
build01
meta01
];
terraform.publicKeys = superadmins; terraform.publicKeys = superadmins;
gerrit01Default.publicKeys = superadmins ++ [ gerrit01 ]; gerrit01Default.publicKeys = superadmins ++ [ gerrit01 ];
public01Default.publicKeys = superadmins ++ [ public01 ]; public01Default.publicKeys = superadmins ++ [ public01 ];
build01Default.publicKeys = superadmins ++ [ build01 ]; build01Default.publicKeys = superadmins ++ [ build01 ];
meta01Default.publicKeys = superadmins ++ [ meta01 ]; meta01Default.publicKeys = superadmins ++ [ meta01 ];
ciDefault.publicKeys = superadmins ++ [ gerrit01 build01 ]; ciDefault.publicKeys = superadmins ++ [
gerrit01
build01
];
in in
{ {
"grafana-agent-password.age" = allDefault; "grafana-agent-password.age" = allDefault;


@ -1,4 +1,5 @@
{ ... }: { { ... }:
{
flokli = [ flokli = [
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIPTVTXOutUZZjXLB0lUSgeKcSY/8mxKkC0ingGK1whD2 flokli" "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIPTVTXOutUZZjXLB0lUSgeKcSY/8mxKkC0ingGK1whD2 flokli"
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIE6a15p9HLSrawsMTd2UQGAiM7r7VdyrfSRyzwRYTgWT flokli@m2air" "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIE6a15p9HLSrawsMTd2UQGAiM7r7VdyrfSRyzwRYTgWT flokli@m2air"

File diff suppressed because it is too large.


@ -16,22 +16,25 @@ rec {
# A kernel with virtiofs support baked in # A kernel with virtiofs support baked in
# TODO: make a smaller kernel, we don't need a gazillion filesystems and # TODO: make a smaller kernel, we don't need a gazillion filesystems and
# device drivers in it. # device drivers in it.
kernel = pkgs.buildLinux ({ } // { kernel = pkgs.buildLinux (
inherit (pkgs.linuxPackages_latest.kernel) src version modDirVersion; { }
autoModules = false; // {
kernelPreferBuiltin = true; inherit (pkgs.linuxPackages_latest.kernel) src version modDirVersion;
ignoreConfigErrors = true; autoModules = false;
kernelPatches = [ ]; kernelPreferBuiltin = true;
structuredExtraConfig = with pkgs.lib.kernel; { ignoreConfigErrors = true;
FUSE_FS = option yes; kernelPatches = [ ];
DAX_DRIVER = option yes; structuredExtraConfig = with pkgs.lib.kernel; {
DAX = option yes; FUSE_FS = option yes;
FS_DAX = option yes; DAX_DRIVER = option yes;
VIRTIO_FS = option yes; DAX = option yes;
VIRTIO = option yes; FS_DAX = option yes;
ZONE_DEVICE = option yes; VIRTIO_FS = option yes;
}; VIRTIO = option yes;
}); ZONE_DEVICE = option yes;
};
}
);
# A build framework for minimal initrds # A build framework for minimal initrds
uroot = pkgs.buildGoModule rec { uroot = pkgs.buildGoModule rec {


@ -1,4 +1,9 @@
{ depot, pkgs, lib, ... }: {
depot,
pkgs,
lib,
...
}:
let let
# Seed a snix-store with the specified path, then start a VM with the # Seed a snix-store with the specified path, then start a VM with the
@ -6,46 +11,48 @@ let
# Allows customizing the cmdline, which can be used to list files, # Allows customizing the cmdline, which can be used to list files,
# or specify what init should be booted. # or specify what init should be booted.
mkBootTest = mkBootTest =
{ blobServiceAddr ? "memory://" {
, directoryServiceAddr ? "memory://" blobServiceAddr ? "memory://",
, pathInfoServiceAddr ? "memory://" directoryServiceAddr ? "memory://",
pathInfoServiceAddr ? "memory://",
# The path to import. # The path to import.
, path path,
# Whether the path should be imported as a closure. # Whether the path should be imported as a closure.
# If false, importPathName must be specified. # If false, importPathName must be specified.
, isClosure ? false isClosure ? false,
# Whether to use nar-bridge to upload, rather than snix-store copy. # Whether to use nar-bridge to upload, rather than snix-store copy.
# using nar-bridge currently is "slower", as the `pkgs.mkBinaryCache` build # using nar-bridge currently is "slower", as the `pkgs.mkBinaryCache` build
# takes quite some time. # takes quite some time.
, useNarBridge ? false useNarBridge ? false,
, importPathName ? null importPathName ? null,
# Commands to run before starting the snix-daemon. Useful to provide # Commands to run before starting the snix-daemon. Useful to provide
# auxiliary mock services. # auxiliary mock services.
, preStart ? "" preStart ? "",
# The cmdline to pass to the VM. # The cmdline to pass to the VM.
# Defaults to snix.find, which lists all files in the store. # Defaults to snix.find, which lists all files in the store.
, vmCmdline ? "snix.find" vmCmdline ? "snix.find",
# The string we expect to find in the VM output. # The string we expect to find in the VM output.
# Defaults the value of `path` (the store path we upload). # Defaults the value of `path` (the store path we upload).
, assertVMOutput ? path assertVMOutput ? path,
}: }:
assert isClosure -> importPathName == null; assert isClosure -> importPathName == null;
assert (!isClosure) -> importPathName != null; assert (!isClosure) -> importPathName != null;
pkgs.stdenv.mkDerivation ({ pkgs.stdenv.mkDerivation (
{
name = "run-vm"; name = "run-vm";
nativeBuildInputs = [ nativeBuildInputs = [
depot.snix.store depot.snix.store
depot.snix.boot.runVM depot.snix.boot.runVM
] ++ lib.optionals (isClosure && useNarBridge) [ ]
++ lib.optionals (isClosure && useNarBridge) [
depot.snix.nar-bridge depot.snix.nar-bridge
pkgs.curl pkgs.curl
pkgs.rush-parallel pkgs.rush-parallel
@ -76,17 +83,20 @@ let
export BLOB_SERVICE_ADDR=grpc+unix://$PWD/snix-store.sock export BLOB_SERVICE_ADDR=grpc+unix://$PWD/snix-store.sock
export DIRECTORY_SERVICE_ADDR=grpc+unix://$PWD/snix-store.sock export DIRECTORY_SERVICE_ADDR=grpc+unix://$PWD/snix-store.sock
export PATH_INFO_SERVICE_ADDR=grpc+unix://$PWD/snix-store.sock export PATH_INFO_SERVICE_ADDR=grpc+unix://$PWD/snix-store.sock
'' + lib.optionalString (!isClosure) '' ''
+ lib.optionalString (!isClosure) ''
echo "Importing ${path} into snix-store with name ${importPathName}" echo "Importing ${path} into snix-store with name ${importPathName}"
cp -R ${path} ${importPathName} cp -R ${path} ${importPathName}
outpath=$(snix-store import ${importPathName}) outpath=$(snix-store import ${importPathName})
echo "imported to $outpath" echo "imported to $outpath"
'' + lib.optionalString (isClosure && !useNarBridge) '' ''
+ lib.optionalString (isClosure && !useNarBridge) ''
echo "Copying closure ${path}" echo "Copying closure ${path}"
# This picks up the `closure` key in `$NIX_ATTRS_JSON_FILE` automatically. # This picks up the `closure` key in `$NIX_ATTRS_JSON_FILE` automatically.
snix-store --otlp=false copy snix-store --otlp=false copy
'' + lib.optionalString (isClosure && useNarBridge) '' ''
+ lib.optionalString (isClosure && useNarBridge) ''
echo "Starting nar-bridge" echo "Starting nar-bridge"
nar-bridge \ nar-bridge \
--otlp=false \ --otlp=false \
@ -122,7 +132,8 @@ let
# In the future, we might want to make this behaviour configurable, # In the future, we might want to make this behaviour configurable,
# and disable checking here, to keep the logic simple. # and disable checking here, to keep the logic simple.
ls -d $to_upload/*.narinfo | rush 'curl -s -T - --unix-socket $PWD/nar-bridge.sock http://localhost:9000/$(basename {}) < {}' ls -d $to_upload/*.narinfo | rush 'curl -s -T - --unix-socket $PWD/nar-bridge.sock http://localhost:9000/$(basename {}) < {}'
'' + '' ''
+ ''
# Invoke a VM using snix as the backing store, ensure the outpath appears in its listing. # Invoke a VM using snix as the backing store, ensure the outpath appears in its listing.
echo "Starting VM" echo "Starting VM"
@ -138,113 +149,130 @@ let
meta.ci.buildkiteExtraStepArgs = { meta.ci.buildkiteExtraStepArgs = {
retry.automatic = true; retry.automatic = true;
}; };
} // lib.optionalAttrs (isClosure && !useNarBridge) { }
// lib.optionalAttrs (isClosure && !useNarBridge) {
__structuredAttrs = true; __structuredAttrs = true;
exportReferencesGraph.closure = [ path ]; exportReferencesGraph.closure = [ path ];
}); }
);
testSystem = (pkgs.nixos { testSystem =
# Set some options necessary to evaluate. (pkgs.nixos {
boot.loader.systemd-boot.enable = true; # Set some options necessary to evaluate.
# TODO: figure out how to disable this without causing eval to fail boot.loader.systemd-boot.enable = true;
fileSystems."/" = { # TODO: figure out how to disable this without causing eval to fail
device = "/dev/root"; fileSystems."/" = {
fsType = "tmpfs"; device = "/dev/root";
}; fsType = "tmpfs";
};
services.getty.helpLine = "Onwards and upwards."; services.getty.helpLine = "Onwards and upwards.";
systemd.services.do-shutdown = { systemd.services.do-shutdown = {
after = [ "getty.target" ]; after = [ "getty.target" ];
description = "Shut down again"; description = "Shut down again";
wantedBy = [ "multi-user.target" ]; wantedBy = [ "multi-user.target" ];
serviceConfig.Type = "oneshot"; serviceConfig.Type = "oneshot";
script = "/run/current-system/sw/bin/systemctl poweroff --when=+10s"; script = "/run/current-system/sw/bin/systemctl poweroff --when=+10s";
}; };
# Don't warn about stateVersion. # Don't warn about stateVersion.
system.stateVersion = "24.05"; system.stateVersion = "24.05";
# Speed up evaluation and building. # Speed up evaluation and building.
documentation.enable = lib.mkForce false; documentation.enable = lib.mkForce false;
}).config.system.build.toplevel; }).config.system.build.toplevel;
in in
depot.nix.readTree.drvTargets { depot.nix.readTree.drvTargets {
docs-memory = (mkBootTest { docs-memory = (
path = ../../docs; mkBootTest {
importPathName = "docs"; path = ../../docs;
}); importPathName = "docs";
docs-persistent = (mkBootTest { }
blobServiceAddr = "objectstore+file:///build/blobs"; );
directoryServiceAddr = "redb:///build/directories.redb"; docs-persistent = (
pathInfoServiceAddr = "redb:///build/pathinfo.redb"; mkBootTest {
path = ../../docs; blobServiceAddr = "objectstore+file:///build/blobs";
importPathName = "docs"; directoryServiceAddr = "redb:///build/directories.redb";
}); pathInfoServiceAddr = "redb:///build/pathinfo.redb";
path = ../../docs;
importPathName = "docs";
}
);
closure-snix = (mkBootTest { closure-snix = (
blobServiceAddr = "objectstore+file:///build/blobs"; mkBootTest {
path = depot.snix.store; blobServiceAddr = "objectstore+file:///build/blobs";
isClosure = true; path = depot.snix.store;
}); isClosure = true;
}
);
closure-nixos = (mkBootTest { closure-nixos = (
blobServiceAddr = "objectstore+file:///build/blobs"; mkBootTest {
pathInfoServiceAddr = "redb:///build/pathinfo.redb"; blobServiceAddr = "objectstore+file:///build/blobs";
directoryServiceAddr = "redb:///build/directories.redb"; pathInfoServiceAddr = "redb:///build/pathinfo.redb";
path = testSystem; directoryServiceAddr = "redb:///build/directories.redb";
isClosure = true; path = testSystem;
vmCmdline = "init=${testSystem}/init panic=-1"; # reboot immediately on panic isClosure = true;
assertVMOutput = "Onwards and upwards."; vmCmdline = "init=${testSystem}/init panic=-1"; # reboot immediately on panic
}); assertVMOutput = "Onwards and upwards.";
}
);
closure-nixos-bigtable = (mkBootTest { closure-nixos-bigtable = (
blobServiceAddr = "objectstore+file:///build/blobs"; mkBootTest {
directoryServiceAddr = "bigtable://instance-1?project_id=project-1&table_name=directories&family_name=cf1"; blobServiceAddr = "objectstore+file:///build/blobs";
pathInfoServiceAddr = "bigtable://instance-1?project_id=project-1&table_name=pathinfos&family_name=cf1"; directoryServiceAddr = "bigtable://instance-1?project_id=project-1&table_name=directories&family_name=cf1";
path = testSystem; pathInfoServiceAddr = "bigtable://instance-1?project_id=project-1&table_name=pathinfos&family_name=cf1";
useNarBridge = true; path = testSystem;
preStart = '' useNarBridge = true;
${pkgs.cbtemulator}/bin/cbtemulator -address $PWD/cbtemulator.sock & preStart = ''
timeout 22 sh -c 'until [ -e $PWD/cbtemulator.sock ]; do sleep 1; done' ${pkgs.cbtemulator}/bin/cbtemulator -address $PWD/cbtemulator.sock &
timeout 22 sh -c 'until [ -e $PWD/cbtemulator.sock ]; do sleep 1; done'
export BIGTABLE_EMULATOR_HOST=unix://$PWD/cbtemulator.sock export BIGTABLE_EMULATOR_HOST=unix://$PWD/cbtemulator.sock
${pkgs.google-cloud-bigtable-tool}/bin/cbt -instance instance-1 -project project-1 createtable directories ${pkgs.google-cloud-bigtable-tool}/bin/cbt -instance instance-1 -project project-1 createtable directories
${pkgs.google-cloud-bigtable-tool}/bin/cbt -instance instance-1 -project project-1 createfamily directories cf1 ${pkgs.google-cloud-bigtable-tool}/bin/cbt -instance instance-1 -project project-1 createfamily directories cf1
${pkgs.google-cloud-bigtable-tool}/bin/cbt -instance instance-1 -project project-1 createtable pathinfos ${pkgs.google-cloud-bigtable-tool}/bin/cbt -instance instance-1 -project project-1 createtable pathinfos
${pkgs.google-cloud-bigtable-tool}/bin/cbt -instance instance-1 -project project-1 createfamily pathinfos cf1 ${pkgs.google-cloud-bigtable-tool}/bin/cbt -instance instance-1 -project project-1 createfamily pathinfos cf1
''; '';
isClosure = true; isClosure = true;
vmCmdline = "init=${testSystem}/init panic=-1"; # reboot immediately on panic vmCmdline = "init=${testSystem}/init panic=-1"; # reboot immediately on panic
assertVMOutput = "Onwards and upwards."; assertVMOutput = "Onwards and upwards.";
}); }
);
closure-nixos-s3 = (mkBootTest { closure-nixos-s3 = (
blobServiceAddr = "objectstore+s3://mybucket/blobs?aws_access_key_id=myaccesskey&aws_secret_access_key=supersecret&aws_endpoint_url=http%3A%2F%2Flocalhost%3A9000&aws_allow_http=1"; mkBootTest {
# we cannot use s3 here yet without any caching layer, as we don't allow "deeper" access to directories (non-root nodes) blobServiceAddr = "objectstore+s3://mybucket/blobs?aws_access_key_id=myaccesskey&aws_secret_access_key=supersecret&aws_endpoint_url=http%3A%2F%2Flocalhost%3A9000&aws_allow_http=1";
# directoryServiceAddr = "objectstore+s3://mybucket/directories?aws_access_key_id=myaccesskey&aws_secret_access_key=supersecret&endpoint=http%3A%2F%2Flocalhost%3A9000&aws_allow_http=1"; # we cannot use s3 here yet without any caching layer, as we don't allow "deeper" access to directories (non-root nodes)
directoryServiceAddr = "memory://"; # directoryServiceAddr = "objectstore+s3://mybucket/directories?aws_access_key_id=myaccesskey&aws_secret_access_key=supersecret&endpoint=http%3A%2F%2Flocalhost%3A9000&aws_allow_http=1";
pathInfoServiceAddr = "memory://"; directoryServiceAddr = "memory://";
path = testSystem; pathInfoServiceAddr = "memory://";
useNarBridge = true; path = testSystem;
preStart = '' useNarBridge = true;
MINIO_ACCESS_KEY=myaccesskey MINIO_SECRET_KEY=supersecret MINIO_ADDRESS=127.0.0.1:9000 ${pkgs.minio}/bin/minio server $(mktemp -d) & preStart = ''
timeout 22 sh -c 'until ${pkgs.netcat}/bin/nc -z $0 $1; do sleep 1; done' localhost 9000 MINIO_ACCESS_KEY=myaccesskey MINIO_SECRET_KEY=supersecret MINIO_ADDRESS=127.0.0.1:9000 ${pkgs.minio}/bin/minio server $(mktemp -d) &
mc_config_dir=$(mktemp -d) timeout 22 sh -c 'until ${pkgs.netcat}/bin/nc -z $0 $1; do sleep 1; done' localhost 9000
${pkgs.minio-client}/bin/mc --config-dir $mc_config_dir alias set 'myminio' 'http://127.0.0.1:9000' 'myaccesskey' 'supersecret' mc_config_dir=$(mktemp -d)
${pkgs.minio-client}/bin/mc --config-dir $mc_config_dir mb myminio/mybucket ${pkgs.minio-client}/bin/mc --config-dir $mc_config_dir alias set 'myminio' 'http://127.0.0.1:9000' 'myaccesskey' 'supersecret'
''; ${pkgs.minio-client}/bin/mc --config-dir $mc_config_dir mb myminio/mybucket
isClosure = true; '';
vmCmdline = "init=${testSystem}/init panic=-1"; # reboot immediately on panic isClosure = true;
assertVMOutput = "Onwards and upwards."; vmCmdline = "init=${testSystem}/init panic=-1"; # reboot immediately on panic
}); assertVMOutput = "Onwards and upwards.";
}
);
closure-nixos-nar-bridge = (mkBootTest { closure-nixos-nar-bridge = (
blobServiceAddr = "objectstore+file:///build/blobs"; mkBootTest {
path = testSystem; blobServiceAddr = "objectstore+file:///build/blobs";
useNarBridge = true; path = testSystem;
isClosure = true; useNarBridge = true;
vmCmdline = "init=${testSystem}/init panic=-1"; # reboot immediately on panic isClosure = true;
assertVMOutput = "Onwards and upwards."; vmCmdline = "init=${testSystem}/init panic=-1"; # reboot immediately on panic
}); assertVMOutput = "Onwards and upwards.";
}
);
} }
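The string chains that nixfmt re-indents above, now breaking before each +, are how mkBootTest assembles its build script from the isClosure and useNarBridge flags; lib.optionalString contributes a piece only when its condition holds. A tiny evaluable model of the selection logic:

  let
    lib = (import <nixpkgs> { }).lib;
    isClosure = true;
    useNarBridge = false;
  in
  ''
    setup store
  ''
  + lib.optionalString (!isClosure) ''
    import single path
  ''
  + lib.optionalString (isClosure && !useNarBridge) ''
    snix-store copy
  ''
  + lib.optionalString (isClosure && useNarBridge) ''
    upload via nar-bridge
  ''
  # => "setup store\nsnix-store copy\n"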


@ -9,25 +9,26 @@ in
name = "build-go"; name = "build-go";
src = depot.third_party.gitignoreSource ./.; src = depot.third_party.gitignoreSource ./.;
vendorHash = "sha256:1j652an8ir1ybyj21znaipsir7mbs3v972mw27ppsjz9dgh2crx6"; vendorHash = "sha256:1j652an8ir1ybyj21znaipsir7mbs3v972mw27ppsjz9dgh2crx6";
}).overrideAttrs (_: { }).overrideAttrs
meta.ci.extraSteps = { (_: {
check = { meta.ci.extraSteps = {
label = ":water_buffalo: ensure generated protobuf files match"; check = {
needsOutput = true; label = ":water_buffalo: ensure generated protobuf files match";
command = pkgs.writeShellScript "pb-go-check" '' needsOutput = true;
${regenerate} command = pkgs.writeShellScript "pb-go-check" ''
if [[ -n "$(git status --porcelain -unormal)" ]]; then ${regenerate}
echo "-----------------------------" if [[ -n "$(git status --porcelain -unormal)" ]]; then
echo ".pb.go files need to be updated, mg run //snix/build-go/regenerate" echo "-----------------------------"
echo "-----------------------------" echo ".pb.go files need to be updated, mg run //snix/build-go/regenerate"
git status -unormal echo "-----------------------------"
exit 1 git status -unormal
fi exit 1
''; fi
alwaysRun = true; '';
alwaysRun = true;
};
}; };
}; # https://git.snix.dev/snix/snix/issues/60
# https://git.snix.dev/snix/snix/issues/60 meta.ci.skip = true;
meta.ci.skip = true; passthru.regenerate = regenerate;
passthru.regenerate = regenerate; })
})


@ -2,10 +2,15 @@
(depot.snix.crates.workspaceMembers.snix-build.build.override { (depot.snix.crates.workspaceMembers.snix-build.build.override {
runTests = true; runTests = true;
}).overrideAttrs (old: rec { }).overrideAttrs
meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru); (old: rec {
passthru = old.passthru // (depot.snix.utils.mkFeaturePowerset { meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (
inherit (old) crateName; lib.attrNames passthru
features = [ "tonic-reflection" ]; );
}); passthru =
}) old.passthru
// (depot.snix.utils.mkFeaturePowerset {
inherit (old) crateName;
features = [ "tonic-reflection" ];
});
})
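The meta.ci.targets filter above picks out the with-features-* and no-features attributes that depot.snix.utils.mkFeaturePowerset injects into passthru, one build per feature combination. That helper's implementation is not part of this diff; the following is only an illustrative sketch of the naming scheme the filter matches:

  let
    lib = (import <nixpkgs> { }).lib;
    features = [ "cloud" "fuse" ];
    # all subsets of the feature list, built up fold by fold:
    powerset = lib.foldl' (acc: f: acc ++ map (set: set ++ [ f ]) acc) [ [ ] ] features;
    nameFor =
      set: if set == [ ] then "no-features" else "with-features-${lib.concatStringsSep "-" set}";
  in
  map nameFor powerset
  # => [ "no-features" "with-features-cloud" "with-features-fuse" "with-features-cloud-fuse" ]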


@ -1,4 +1,9 @@
{ depot, pkgs, lib, ... }: {
depot,
pkgs,
lib,
...
}:
let let
protos = lib.sourceByRegex depot.path.origSrc [ protos = lib.sourceByRegex depot.path.origSrc [
"buf.yaml" "buf.yaml"


@ -9,23 +9,24 @@ in
name = "castore-go"; name = "castore-go";
src = depot.third_party.gitignoreSource ./.; src = depot.third_party.gitignoreSource ./.;
vendorHash = "sha256:03wwzk7irlb05y0zjfmpp5c2dxhcpnmfc169g05sn6d3ni07aly8"; vendorHash = "sha256:03wwzk7irlb05y0zjfmpp5c2dxhcpnmfc169g05sn6d3ni07aly8";
}).overrideAttrs (_: { }).overrideAttrs
meta.ci.extraSteps = { (_: {
check = { meta.ci.extraSteps = {
label = ":water_buffalo: ensure generated protobuf files match"; check = {
needsOutput = true; label = ":water_buffalo: ensure generated protobuf files match";
command = pkgs.writeShellScript "pb-go-check" '' needsOutput = true;
${regenerate} command = pkgs.writeShellScript "pb-go-check" ''
if [[ -n "$(git status --porcelain -unormal)" ]]; then ${regenerate}
echo "-----------------------------" if [[ -n "$(git status --porcelain -unormal)" ]]; then
echo ".pb.go files need to be updated, mg run //snix/castore-go/regenerate" echo "-----------------------------"
echo "-----------------------------" echo ".pb.go files need to be updated, mg run //snix/castore-go/regenerate"
git status -unormal echo "-----------------------------"
exit 1 git status -unormal
fi exit 1
''; fi
alwaysRun = true; '';
alwaysRun = true;
};
}; };
}; passthru.regenerate = regenerate;
passthru.regenerate = regenerate; })
})


@ -1,28 +1,51 @@
{ depot, pkgs, lib, ... }: {
depot,
pkgs,
lib,
...
}:
(depot.snix.crates.workspaceMembers.snix-castore.build.override { (depot.snix.crates.workspaceMembers.snix-castore.build.override {
runTests = true; runTests = true;
testPreRun = '' testPreRun = ''
export SSL_CERT_FILE=/dev/null export SSL_CERT_FILE=/dev/null
''; '';
}).overrideAttrs (old: rec { }).overrideAttrs
meta.ci.targets = [ "integration-tests" ] ++ lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru); (old: rec {
passthru = (depot.snix.utils.mkFeaturePowerset { meta.ci.targets = [
inherit (old) crateName; "integration-tests"
features = ([ "cloud" "fuse" "tonic-reflection" "xp-composition-url-refs" ] ]
# virtiofs feature currently fails to build on Darwin ++ lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
++ lib.optional pkgs.stdenv.isLinux "virtiofs"); passthru =
override.testPreRun = '' (depot.snix.utils.mkFeaturePowerset {
export SSL_CERT_FILE=/dev/null inherit (old) crateName;
''; features = (
}) // { [
integration-tests = depot.snix.crates.workspaceMembers.${old.crateName}.build.override (old: { "cloud"
runTests = true; "fuse"
testPreRun = '' "tonic-reflection"
export SSL_CERT_FILE=/dev/null "xp-composition-url-refs"
export PATH="$PATH:${pkgs.lib.makeBinPath [ pkgs.cbtemulator pkgs.google-cloud-bigtable-tool ]}" ]
''; # virtiofs feature currently fails to build on Darwin
features = old.features ++ [ "integration" ]; ++ lib.optional pkgs.stdenv.isLinux "virtiofs"
}); );
}; override.testPreRun = ''
}) export SSL_CERT_FILE=/dev/null
'';
})
// {
integration-tests = depot.snix.crates.workspaceMembers.${old.crateName}.build.override (old: {
runTests = true;
testPreRun = ''
export SSL_CERT_FILE=/dev/null
export PATH="$PATH:${
pkgs.lib.makeBinPath [
pkgs.cbtemulator
pkgs.google-cloud-bigtable-tool
]
}"
'';
features = old.features ++ [ "integration" ];
});
};
})


@ -1,4 +1,9 @@
{ depot, pkgs, lib, ... }: {
depot,
pkgs,
lib,
...
}:
let let
protos = lib.sourceByRegex depot.path.origSrc [ protos = lib.sourceByRegex depot.path.origSrc [
"buf.yaml" "buf.yaml"


@ -1,111 +1,176 @@
{ depot, pkgs, lib, ... }: {
depot,
pkgs,
lib,
...
}:
(depot.snix.crates.workspaceMembers.snix-cli.build.override { (depot.snix.crates.workspaceMembers.snix-cli.build.override {
runTests = true; runTests = true;
testPreRun = '' testPreRun = ''
export SSL_CERT_FILE=/dev/null export SSL_CERT_FILE=/dev/null
''; '';
}).overrideAttrs (finalAttrs: previousAttrs: }).overrideAttrs
(
finalAttrs: previousAttrs:
let let
snix-cli = finalAttrs.finalPackage; snix-cli = finalAttrs.finalPackage;
benchmark-gnutime-format-string = benchmark-gnutime-format-string =
description: description:
"Benchmark: " + "Benchmark: "
(builtins.toJSON { + (builtins.toJSON {
"${description}" = { "${description}" = {
kbytes = "%M"; kbytes = "%M";
system = "%S"; system = "%S";
user = "%U"; user = "%U";
};
});
# You can run the benchmark with a simple `nix run`, like:
#
# nix-build -A snix.cli.meta.ci.extraSteps.benchmark-nixpkgs-cross-hello-outpath
#
# TODO(amjoseph): store these results someplace more durable, like git trailers
#
mkExprBenchmark =
{ expr, description }:
let
name = "snix-cli-benchmark-${description}";
in
(pkgs.runCommand name { } ''
export SSL_CERT_FILE=/dev/null
${lib.escapeShellArgs [
"${pkgs.time}/bin/time"
"--format"
"${benchmark-gnutime-format-string description}"
"${snix-cli}/bin/snix"
"--no-warnings"
"-E"
expr
]}
touch $out
'');
mkNixpkgsBenchmark =
attrpath:
mkExprBenchmark {
description = builtins.replaceStrings [ ".drv" ] [ "-drv" ] attrpath;
expr = "(import ${pkgs.path} {}).${attrpath}";
};
# Constructs a Derivation invoking snix-cli inside a build, ensures the
# calculated snix output path matches what's passed in externally.
mkNixpkgsEvalTest =
{
attrPath ? null, # An attribute that must already be accessible from `pkgs`. Should evaluate to a store path.
expr ? null, # A Nix expression that should evaluate to a store path.
expectedPath, # The expected store path that should match one of the above.
}:
assert lib.assertMsg (attrPath != null || expr != null) "Either 'attrPath' or 'expr' must be set.";
let
name = "snix-eval-test-${
builtins.replaceStrings [ ".drv" ] [ "-drv" ] (if expr != null then "custom-expr" else attrPath)
}";
in
(pkgs.runCommand name { } ''
export SSL_CERT_FILE=/dev/null
SNIX_OUTPUT=$(${snix-cli}/bin/snix --no-warnings -E '${
if expr != null then expr else "(import ${pkgs.path} {}).${attrPath}"
}')
EXPECTED='${
# the verbatim expected Snix output:
"=> \"${builtins.unsafeDiscardStringContext expectedPath}\" :: string"
}'
echo "Snix output: ''${SNIX_OUTPUT}"
if [ "$SNIX_OUTPUT" != "$EXPECTED" ]; then
echo "Correct would have been ''${EXPECTED}"
exit 1
fi
echo "Output was correct."
touch $out
'');
benchmarks = {
benchmark-hello = (mkNixpkgsBenchmark "hello.outPath");
benchmark-cross-hello = (mkNixpkgsBenchmark "pkgsCross.aarch64-multiplatform.hello.outPath");
benchmark-firefox = (mkNixpkgsBenchmark "firefox.outPath");
benchmark-cross-firefox = (mkNixpkgsBenchmark "pkgsCross.aarch64-multiplatform.firefox.outPath");
# Example used for benchmarking LightSpan::Delayed in commit bf286a54bc2ac5eeb78c3d5c5ae66e9af24d74d4
benchmark-nixpkgs-attrnames = (
mkExprBenchmark {
expr = "builtins.length (builtins.attrNames (import ${pkgs.path} {}))";
description = "nixpkgs-attrnames";
}
);
}; };
});
# You can run the benchmark with a simple `nix run`, like: evalTests = {
# eval-nixpkgs-stdenv-drvpath = (
# nix-build -A snix.cli.meta.ci.extraSteps.benchmark-nixpkgs-cross-hello-outpath mkNixpkgsEvalTest {
# attrPath = "stdenv.drvPath";
# TODO(amjoseph): store these results someplace more durable, like git trailers expectedPath = pkgs.stdenv.drvPath;
# }
mkExprBenchmark = { expr, description }: );
let name = "snix-cli-benchmark-${description}"; in eval-nixpkgs-stdenv-outpath = (
(pkgs.runCommand name { } '' mkNixpkgsEvalTest {
export SSL_CERT_FILE=/dev/null attrPath = "stdenv.outPath";
${lib.escapeShellArgs [ expectedPath = pkgs.stdenv.outPath;
"${pkgs.time}/bin/time" }
"--format" "${benchmark-gnutime-format-string description}" );
"${snix-cli}/bin/snix" eval-nixpkgs-hello-outpath = (
"--no-warnings" mkNixpkgsEvalTest {
"-E" expr attrPath = "hello.outPath";
]} expectedPath = pkgs.hello.outPath;
touch $out }
''); );
eval-nixpkgs-firefox-outpath = (
mkNixpkgsEvalTest {
attrPath = "firefox.outPath";
expectedPath = pkgs.firefox.outPath;
}
);
eval-nixpkgs-firefox-drvpath = (
mkNixpkgsEvalTest {
attrPath = "firefox.drvPath";
expectedPath = pkgs.firefox.drvPath;
}
);
eval-nixpkgs-cross-stdenv-outpath = (
mkNixpkgsEvalTest {
attrPath = "pkgsCross.aarch64-multiplatform.stdenv.outPath";
expectedPath = pkgs.pkgsCross.aarch64-multiplatform.stdenv.outPath;
}
);
eval-nixpkgs-cross-hello-outpath = (
mkNixpkgsEvalTest {
attrPath = "pkgsCross.aarch64-multiplatform.hello.outPath";
expectedPath = pkgs.pkgsCross.aarch64-multiplatform.hello.outPath;
}
);
eval-nixpkgs-nixos-graphical-installer-drvpath = (
mkNixpkgsEvalTest {
expr = "(import ${pkgs.path}/nixos/release.nix { }).iso_graphical.${pkgs.system}.drvPath";
expectedPath = (import "${pkgs.path}/nixos/release.nix" { }).iso_graphical.${pkgs.system}.drvPath;
}
);
eval-nixpkgs-nixos-graphical-installer-outpath = (
mkNixpkgsEvalTest {
expr = "(import ${pkgs.path}/nixos/release.nix { }).iso_graphical.${pkgs.system}.outPath";
expectedPath = (import "${pkgs.path}/nixos/release.nix" { }).iso_graphical.${pkgs.system}.outPath;
}
);
};
in
{
meta = {
ci.targets = (builtins.attrNames benchmarks) ++ (builtins.attrNames evalTests);
};
mkNixpkgsBenchmark = attrpath: # Expose benchmarks and evalTests as standard CI targets.
mkExprBenchmark { passthru = previousAttrs.passthru // benchmarks // evalTests;
description = builtins.replaceStrings [ ".drv" ] [ "-drv" ] attrpath; }
expr = "(import ${pkgs.path} {}).${attrpath}"; )
};
# Constructs a Derivation invoking snix-cli inside a build, ensures the
# calculated snix output path matches what's passed in externally.
mkNixpkgsEvalTest =
{ attrPath ? null # An attribute that must already be accessible from `pkgs`. Should evaluate to a store path.
, expr ? null # A Nix expression that should evaluate to a store path.
, expectedPath # The expected store path that should match one of the above.
}:
assert lib.assertMsg (attrPath != null || expr != null) "Either 'attrPath' or 'expr' must be set.";
let
name = "snix-eval-test-${builtins.replaceStrings [".drv"] ["-drv"] (if expr != null then "custom-expr" else attrPath)}";
in
(pkgs.runCommand name { } ''
export SSL_CERT_FILE=/dev/null
SNIX_OUTPUT=$(${snix-cli}/bin/snix --no-warnings -E '${if expr != null then expr else "(import ${pkgs.path} {}).${attrPath}"}')
EXPECTED='${/* the verbatim expected Snix output: */ "=> \"${builtins.unsafeDiscardStringContext expectedPath}\" :: string"}'
echo "Snix output: ''${SNIX_OUTPUT}"
if [ "$SNIX_OUTPUT" != "$EXPECTED" ]; then
echo "Correct would have been ''${EXPECTED}"
exit 1
fi
echo "Output was correct."
touch $out
'');
benchmarks = {
benchmark-hello = (mkNixpkgsBenchmark "hello.outPath");
benchmark-cross-hello = (mkNixpkgsBenchmark "pkgsCross.aarch64-multiplatform.hello.outPath");
benchmark-firefox = (mkNixpkgsBenchmark "firefox.outPath");
benchmark-cross-firefox = (mkNixpkgsBenchmark "pkgsCross.aarch64-multiplatform.firefox.outPath");
# Example used for benchmarking LightSpan::Delayed in commit bf286a54bc2ac5eeb78c3d5c5ae66e9af24d74d4
benchmark-nixpkgs-attrnames = (mkExprBenchmark { expr = "builtins.length (builtins.attrNames (import ${pkgs.path} {}))"; description = "nixpkgs-attrnames"; });
};
evalTests = {
eval-nixpkgs-stdenv-drvpath = (mkNixpkgsEvalTest { attrPath = "stdenv.drvPath"; expectedPath = pkgs.stdenv.drvPath; });
eval-nixpkgs-stdenv-outpath = (mkNixpkgsEvalTest { attrPath = "stdenv.outPath"; expectedPath = pkgs.stdenv.outPath; });
eval-nixpkgs-hello-outpath = (mkNixpkgsEvalTest { attrPath = "hello.outPath"; expectedPath = pkgs.hello.outPath; });
eval-nixpkgs-firefox-outpath = (mkNixpkgsEvalTest { attrPath = "firefox.outPath"; expectedPath = pkgs.firefox.outPath; });
eval-nixpkgs-firefox-drvpath = (mkNixpkgsEvalTest { attrPath = "firefox.drvPath"; expectedPath = pkgs.firefox.drvPath; });
eval-nixpkgs-cross-stdenv-outpath = (mkNixpkgsEvalTest { attrPath = "pkgsCross.aarch64-multiplatform.stdenv.outPath"; expectedPath = pkgs.pkgsCross.aarch64-multiplatform.stdenv.outPath; });
eval-nixpkgs-cross-hello-outpath = (mkNixpkgsEvalTest { attrPath = "pkgsCross.aarch64-multiplatform.hello.outPath"; expectedPath = pkgs.pkgsCross.aarch64-multiplatform.hello.outPath; });
eval-nixpkgs-nixos-graphical-installer-drvpath = (mkNixpkgsEvalTest {
expr = "(import ${pkgs.path}/nixos/release.nix { }).iso_graphical.${pkgs.system}.drvPath";
expectedPath = (import "${pkgs.path}/nixos/release.nix" { }).iso_graphical.${pkgs.system}.drvPath;
});
eval-nixpkgs-nixos-graphical-installer-outpath = (mkNixpkgsEvalTest {
expr = "(import ${pkgs.path}/nixos/release.nix { }).iso_graphical.${pkgs.system}.outPath";
expectedPath = (import "${pkgs.path}/nixos/release.nix" { }).iso_graphical.${pkgs.system}.outPath;
});
};
in
{
meta = {
ci.targets = (builtins.attrNames benchmarks) ++ (builtins.attrNames evalTests);
};
# Expose benchmarks and evalTests as standard CI targets.
passthru = previousAttrs.passthru // benchmarks // evalTests;
})
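One detail worth noting in mkNixpkgsEvalTest above: the expected value is rendered as the literal REPL-style line => "<path>" :: string, and builtins.unsafeDiscardStringContext strips the store-path context so that interpolating the path yields a plain comparison string instead of adding the path as a build input of the test derivation. Evaluable in isolation (assumes a nixpkgs on NIX_PATH):

  let
    pkgs = import <nixpkgs> { };
    expectedPath = builtins.unsafeDiscardStringContext pkgs.hello.outPath;
  in
  "=> \"${expectedPath}\" :: string"
  # => the exact line the test expects snix to print for hello.outPath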


@ -1 +1 @@
{}: import ./six.nix { } { }: import ./six.nix { }


@ -1 +1,8 @@
{}: { six = builtins.foldl' (x: y: x + y) 0 [ 1 2 3 ]; } { }:
{
six = builtins.foldl' (x: y: x + y) 0 [
1
2
3
];
}
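Since the whole file is visible above, it doubles as a worked example: builtins.foldl' threads the accumulator strictly left to right, so the reformatted list reduces exactly as before:

  builtins.foldl' (x: y: x + y) 0 [
    1
    2
    3
  ]
  # => ((0 + 1) + 2) + 3 = 6, hence the attribute name "six"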


@ -1,5 +1,11 @@
# Nix helpers for projects under //snix # Nix helpers for projects under //snix
{ pkgs, lib, depot, here, ... }: {
pkgs,
lib,
depot,
here,
...
}:
let let
# Load the crate2nix crate tree. # Load the crate2nix crate tree.
@ -13,13 +19,18 @@ let
# Extract the hashes from `crates` / Cargo.nix, we already get them from cargo2nix. # Extract the hashes from `crates` / Cargo.nix, we already get them from cargo2nix.
# This returns an attribute set containing "${crateName}-${version}" as key, # This returns an attribute set containing "${crateName}-${version}" as key,
# and the outputHash as value. # and the outputHash as value.
outputHashes = builtins.listToAttrs outputHashes = builtins.listToAttrs (
(map map
(k: (
(lib.nameValuePair "${crates.internal.crates.${k}.crateName}-${crates.internal.crates.${k}.version}" crates.internal.crates.${k}.src.outputHash) k:
) [ (lib.nameValuePair "${crates.internal.crates.${k}.crateName}-${
"wu-manber" crates.internal.crates.${k}.version
]); }" crates.internal.crates.${k}.src.outputHash)
)
[
"wu-manber"
]
);
}; };
# The cleaned sources. # The cleaned sources.
@@ -36,32 +47,42 @@ let
     ];
   };

-  mkCargoBuild = args: pkgs.stdenv.mkDerivation ({
-    inherit cargoDeps src;
-    PROTO_ROOT = protos;
-    SNIX_BUILD_SANDBOX_SHELL = "/homeless-shelter";
-
-    nativeBuildInputs = with pkgs; [
-      cargo
-      pkg-config
-      protobuf
-      rustc
-      rustPlatform.cargoSetupHook
-    ] ++ (args.nativeBuildInputs or [ ]);
-  } // (pkgs.lib.removeAttrs args [ "nativeBuildInputs" ]));
+  mkCargoBuild =
+    args:
+    pkgs.stdenv.mkDerivation (
+      {
+        inherit cargoDeps src;
+        PROTO_ROOT = protos;
+        SNIX_BUILD_SANDBOX_SHELL = "/homeless-shelter";
+
+        nativeBuildInputs =
+          with pkgs;
+          [
+            cargo
+            pkg-config
+            protobuf
+            rustc
+            rustPlatform.cargoSetupHook
+          ]
+          ++ (args.nativeBuildInputs or [ ]);
+      }
+      // (pkgs.lib.removeAttrs args [ "nativeBuildInputs" ])
+    );
 in
 {
   inherit crates protos mkCargoBuild;

   # Provide the snix logo in both .webp and .png format.
-  logo = pkgs.runCommand "logo"
-    {
-      nativeBuildInputs = [ pkgs.imagemagick ];
-    } ''
-    mkdir -p $out
-    cp ${./logo.webp} $out/logo.webp
-    convert $out/logo.webp $out/logo.png
-  '';
+  logo =
+    pkgs.runCommand "logo"
+      {
+        nativeBuildInputs = [ pkgs.imagemagick ];
+      }
+      ''
+        mkdir -p $out
+        cp ${./logo.webp} $out/logo.webp
+        convert $out/logo.webp $out/logo.png
+      '';

   # Provide a shell for the combined dependencies of all snix Rust
   # projects. Note that as this is manually maintained it may be
@@ -73,7 +94,12 @@ in
   shell = (import ./shell.nix { inherit pkgs; });

   # Shell, but with tools necessary to run the integration tests
-  shell-integration = (import ./shell.nix { inherit pkgs; withIntegration = true; });
+  shell-integration = (
+    import ./shell.nix {
+      inherit pkgs;
+      withIntegration = true;
+    }
+  );

   # Build the Rust documentation for publishing on snix.dev/rustdoc.
   rust-docs = mkCargoBuild {
@@ -81,7 +107,8 @@ in
     buildInputs = [
       pkgs.fuse
-    ] ++ lib.optional pkgs.stdenv.isDarwin pkgs.libiconv;
+    ]
+    ++ lib.optional pkgs.stdenv.isDarwin pkgs.libiconv;

     buildPhase = ''
       RUSTDOCFLAGS="-D rustdoc::broken-intra-doc-links" cargo doc --document-private-items


@@ -1,5 +1,10 @@
 # TODO: find a way to build the benchmarks via crate2nix
-{ depot, pkgs, lib, ... }:
+{
+  depot,
+  pkgs,
+  lib,
+  ...
+}:

 (depot.snix.crates.workspaceMembers.snix-eval.build.override {
   runTests = true;
@@ -7,11 +12,16 @@
   # Make C++ Nix available, to compare eval results against.
   # This needs Nix 2.3, as nix_oracle.rs fails with pkgs.nix
   testInputs = [ pkgs.nix_2_3 ];
-}).overrideAttrs (old: rec {
-  meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
-  passthru = old.passthru // (depot.snix.utils.mkFeaturePowerset {
-    inherit (old) crateName;
-    features = [ "nix_tests" ];
-    override.testInputs = [ pkgs.nix ];
-  });
-})
+}).overrideAttrs
+  (old: rec {
+    meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (
+      lib.attrNames passthru
+    );
+    passthru =
+      old.passthru
+      // (depot.snix.utils.mkFeaturePowerset {
+        inherit (old) crateName;
+        features = [ "nix_tests" ];
+        override.testInputs = [ pkgs.nix ];
+      });
+  })
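For context, the `meta.ci.targets` filter above keeps only the feature-powerset targets out of `passthru`. A minimal sketch of the idea, with illustrative attribute names rather than the exact ones `mkFeaturePowerset` emits:

    lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") [
      "benchmarks"
      "no-features"
      "with-features-nix_tests"
    ]
    # => [ "no-features" "with-features-nix_tests" ]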


@@ -5,13 +5,18 @@
   testPreRun = ''
     export SSL_CERT_FILE=/dev/null
   '';
-}).overrideAttrs (old: rec {
-  meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
-  passthru = old.passthru // (depot.snix.utils.mkFeaturePowerset {
-    inherit (old) crateName;
-    features = [ "nix_tests" ];
-    override.testPreRun = ''
-      export SSL_CERT_FILE=/dev/null
-    '';
-  });
-})
+}).overrideAttrs
+  (old: rec {
+    meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (
+      lib.attrNames passthru
+    );
+    passthru =
+      old.passthru
+      // (depot.snix.utils.mkFeaturePowerset {
+        inherit (old) crateName;
+        features = [ "nix_tests" ];
+        override.testPreRun = ''
+          export SSL_CERT_FILE=/dev/null
+        '';
+      });
+  })


@@ -4,31 +4,37 @@
 #
 # TODO: rewrite in native Rust code

-/* This is the implementation of the derivation builtin function.
-   It's actually a wrapper around the derivationStrict primop. */
+/*
+  This is the implementation of the derivation builtin function.
+  It's actually a wrapper around the derivationStrict primop.
+*/

-drvAttrs @ { outputs ? [ "out" ], ... }:
+drvAttrs@{
+  outputs ? [ "out" ],
+  ...
+}:

 let
   strict = derivationStrict drvAttrs;

-  commonAttrs = drvAttrs // (builtins.listToAttrs outputsList) //
-    {
+  commonAttrs =
+    drvAttrs
+    // (builtins.listToAttrs outputsList)
+    // {
       all = map (x: x.value) outputsList;
       inherit drvAttrs;
     };

-  outputToAttrListElement = outputName:
-    {
-      name = outputName;
-      value = commonAttrs // {
-        outPath = builtins.getAttr outputName strict;
-        drvPath = strict.drvPath;
-        type = "derivation";
-        inherit outputName;
-      };
-    };
+  outputToAttrListElement = outputName: {
+    name = outputName;
+    value = commonAttrs // {
+      outPath = builtins.getAttr outputName strict;
+      drvPath = strict.drvPath;
+      type = "derivation";
+      inherit outputName;
+    };
+  };

   outputsList = map outputToAttrListElement outputs;

@@ -5,21 +5,42 @@
 #
 # Source: https://github.com/NixOS/nix/blob/2.3.16/corepkgs/fetchurl.nix

-{ system ? "" # obsolete
-, url
-, hash ? "" # an SRI hash
-  # Legacy hash specification
-, md5 ? ""
-, sha1 ? ""
-, sha256 ? ""
-, sha512 ? ""
-, outputHash ? if hash != "" then hash else if sha512 != "" then sha512 else if sha1 != "" then sha1 else if md5 != "" then md5 else sha256
-, outputHashAlgo ? if hash != "" then "" else if sha512 != "" then "sha512" else if sha1 != "" then "sha1" else if md5 != "" then "md5" else "sha256"
-, executable ? false
-, unpack ? false
-, name ? baseNameOf (toString url)
-}:
+{
+  system ? "", # obsolete
+  url,
+  hash ? "", # an SRI hash
+  # Legacy hash specification
+  md5 ? "",
+  sha1 ? "",
+  sha256 ? "",
+  sha512 ? "",
+  outputHash ?
+    if hash != "" then
+      hash
+    else if sha512 != "" then
+      sha512
+    else if sha1 != "" then
+      sha1
+    else if md5 != "" then
+      md5
+    else
+      sha256,
+  outputHashAlgo ?
+    if hash != "" then
+      ""
+    else if sha512 != "" then
+      "sha512"
+    else if sha1 != "" then
+      "sha1"
+    else if md5 != "" then
+      "md5"
+    else
+      "sha256",
+  executable ? false,
+  unpack ? false,
+  name ? baseNameOf (toString url),
+}:

 derivation {
@@ -29,7 +50,12 @@ derivation {
   inherit outputHashAlgo outputHash;
   outputHashMode = if unpack || executable then "recursive" else "flat";

-  inherit name url executable unpack;
+  inherit
+    name
+    url
+    executable
+    unpack
+    ;

   system = "builtin";
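The reformatted defaults make the legacy precedence explicit: an SRI `hash` wins, then `sha512`, `sha1`, `md5`, and finally `sha256`, with `outputHashAlgo` following the same chain. A sketch of a call exercising the fallback (the URL and hash are placeholders, not real values):

    import ./fetchurl.nix {
      url = "https://example.com/source.tar.gz";
      sha256 = "0000000000000000000000000000000000000000000000000000";
    }
    # hash, sha512 and sha1 are empty, so outputHash = sha256
    # and outputHashAlgo = "sha256"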


@@ -3,7 +3,10 @@ let
     name = "fail";
     builder = "/bin/false";
     system = "x86_64-linux";
-    outputs = [ "out" "foo" ];
+    outputs = [
+      "out"
+      "foo"
+    ];
   };

   path = "${./eval-okay-context-introspection.nix}";
@@ -13,7 +16,10 @@
       path = true;
     };
     "${builtins.unsafeDiscardStringContext drv.drvPath}" = {
-      outputs = [ "foo" "out" ];
+      outputs = [
+        "foo"
+        "out"
+      ];
       allOutputs = true;
     };
   };
@@ -21,15 +27,12 @@ let
   combo-path = "${path}${drv.outPath}${drv.foo.outPath}${drv.drvPath}";
   legit-context = builtins.getContext combo-path;

-  reconstructed-path = builtins.appendContext
-    (builtins.unsafeDiscardStringContext combo-path)
-    desired-context;
+  reconstructed-path = builtins.appendContext (builtins.unsafeDiscardStringContext combo-path) desired-context;

   # Eta rule for strings with context.
-  etaRule = str:
-    str == builtins.appendContext
-      (builtins.unsafeDiscardStringContext str)
-      (builtins.getContext str);
+  etaRule =
+    str:
+    str == builtins.appendContext (builtins.unsafeDiscardStringContext str) (builtins.getContext str);
 in
 [

@@ -1,6 +1,7 @@
-let s = "foo ${builtins.substring 33 100 (baseNameOf "${./eval-okay-context.nix}")} bar";
+let
+  s = "foo ${builtins.substring 33 100 (baseNameOf "${./eval-okay-context.nix}")} bar";
 in
-if s != "foo eval-okay-context.nix bar"
-then abort "context not discarded"
-else builtins.unsafeDiscardStringContext s
+if s != "foo eval-okay-context.nix bar" then
+  abort "context not discarded"
+else
+  builtins.unsafeDiscardStringContext s

Some files were not shown because too many files have changed in this diff.