style: Switch to nixfmt from nixpkgs-fmt

Most of the ecosystem has moved to this formatter,
and many people have configured their editors to autoformat with it.

Closes: https://git.snix.dev/snix/snix/issues/62
Change-Id: Icf39e7836c91fc2ae49fbe22a40a639105bfb0bd
Reviewed-on: https://cl.snix.dev/c/snix/+/30671
Reviewed-by: Florian Klink <flokli@flokli.de>
Tested-by: besadii
Autosubmit: Ilan Joselevich <personal@ilanjoselevich.com>
This commit is contained in:
Ilan Joselevich 2025-08-09 21:08:41 +02:00
parent 3443e6bd08
commit 91d02d8c84
136 changed files with 39952 additions and 11007 deletions

View file

@ -1,9 +1,14 @@
# Check protobuf breaking. Lints already happen in individual targets.
#
{ depot, pkgs, lib, ... }:
{
depot,
pkgs,
lib,
...
}:
let
inherit (depot.nix) bufCheck;# self reference
inherit (depot.nix) bufCheck; # self reference
script = pkgs.writeShellScriptBin "ci-buf-check" ''
export PATH="$PATH:${pkgs.lib.makeBinPath [ pkgs.buf ]}"

View file

@ -1,7 +1,6 @@
{ makeSetupHook }:
makeSetupHook
{
makeSetupHook {
name = "rules_java_bazel_hook";
substitutions = {
local_java = ./local_java;

View file

@ -1,13 +1,14 @@
{ stdenvNoCC
, lib
, makeSetupHook
, fetchFromGitHub
, coreutils
, gnugrep
, nodejs
, yarn
, git
, cacert
{
stdenvNoCC,
lib,
makeSetupHook,
fetchFromGitHub,
coreutils,
gnugrep,
nodejs,
yarn,
git,
cacert,
}:
let
rulesNodeJS = stdenvNoCC.mkDerivation rec {
@ -30,7 +31,12 @@ let
--replace-quiet '#!/usr/bin/env bash' '#!${stdenvNoCC.shell}' \
--replace-quiet '#!/bin/bash' '#!${stdenvNoCC.shell}'
done
sed -i '/^#!/a export PATH=${lib.makeBinPath [ coreutils gnugrep ]}:$PATH' internal/node/launcher.sh
sed -i '/^#!/a export PATH=${
lib.makeBinPath [
coreutils
gnugrep
]
}:$PATH' internal/node/launcher.sh
'';
installPhase = ''
@ -38,8 +44,7 @@ let
'';
};
in
makeSetupHook
{
makeSetupHook {
name = "bazelbuild-rules_nodejs-5-hook";
propagatedBuildInputs = [
nodejs
@ -48,7 +53,12 @@ makeSetupHook
cacert
];
substitutions = {
inherit nodejs yarn cacert rulesNodeJS;
inherit
nodejs
yarn
cacert
rulesNodeJS
;
local_node = ./local_node;
local_yarn = ./local_yarn;
};

View file

@ -1,17 +1,19 @@
{ stdenv
, lib
, pkgs
, coreutils
{
stdenv,
lib,
pkgs,
coreutils,
}:
{ name ? "${baseAttrs.pname}-${baseAttrs.version}"
, bazelTargets
, bazel ? pkgs.bazel
, depsHash
, extraCacheInstall ? ""
, extraBuildSetup ? ""
, extraBuildInstall ? ""
, ...
{
name ? "${baseAttrs.pname}-${baseAttrs.version}",
bazelTargets,
bazel ? pkgs.bazel,
depsHash,
extraCacheInstall ? "",
extraBuildSetup ? "",
extraBuildInstall ? "",
...
}@baseAttrs:
let
@ -24,20 +26,23 @@ let
];
attrs = cleanAttrs baseAttrs;
base = stdenv.mkDerivation (attrs // {
nativeBuildInputs = (attrs.nativeBuildInputs or [ ]) ++ [
bazel
];
base = stdenv.mkDerivation (
attrs
// {
nativeBuildInputs = (attrs.nativeBuildInputs or [ ]) ++ [
bazel
];
preUnpack = ''
if [[ ! -d $HOME ]]; then
export HOME=$NIX_BUILD_TOP/home
mkdir -p $HOME
fi
'';
preUnpack = ''
if [[ ! -d $HOME ]]; then
export HOME=$NIX_BUILD_TOP/home
mkdir -p $HOME
fi
'';
bazelTargetNames = builtins.attrNames bazelTargets;
});
bazelTargetNames = builtins.attrNames bazelTargets;
}
);
cache = base.overrideAttrs (base: {
name = "${name}-deps";
@ -89,18 +94,23 @@ let
installPhase = ''
runHook preInstall
${builtins.concatStringsSep "\n" (lib.mapAttrsToList (target: outPath: lib.optionalString (outPath != null) ''
TARGET_OUTPUTS="$(bazel cquery --repository_cache=$cache/repository-cache $bazelFlags "''${bazelFlagsArray[@]}" --output=files "${target}")"
if [[ "$(echo "$TARGET_OUTPUTS" | wc -l)" -gt 1 ]]; then
echo "Installing ${target}'s outputs ($TARGET_OUTPUTS) into ${outPath} as a directory"
mkdir -p "${outPath}"
cp $TARGET_OUTPUTS "${outPath}"
else
echo "Installing ${target}'s output ($TARGET_OUTPUTS) to ${outPath}"
mkdir -p "${dirOf outPath}"
cp "$TARGET_OUTPUTS" "${outPath}"
fi
'') bazelTargets)}
${builtins.concatStringsSep "\n" (
lib.mapAttrsToList (
target: outPath:
lib.optionalString (outPath != null) ''
TARGET_OUTPUTS="$(bazel cquery --repository_cache=$cache/repository-cache $bazelFlags "''${bazelFlagsArray[@]}" --output=files "${target}")"
if [[ "$(echo "$TARGET_OUTPUTS" | wc -l)" -gt 1 ]]; then
echo "Installing ${target}'s outputs ($TARGET_OUTPUTS) into ${outPath} as a directory"
mkdir -p "${outPath}"
cp $TARGET_OUTPUTS "${outPath}"
else
echo "Installing ${target}'s output ($TARGET_OUTPUTS) to ${outPath}"
mkdir -p "${dirOf outPath}"
cp "$TARGET_OUTPUTS" "${outPath}"
fi
''
) bazelTargets
)}
${extraBuildInstall}
runHook postInstall

View file

@ -1,6 +1,7 @@
{ pkgs, ... }:
(pkgs.callPackage ./buildBazelPackageNG.nix { }) // {
(pkgs.callPackage ./buildBazelPackageNG.nix { })
// {
bazelRulesJavaHook = pkgs.callPackage ./bazelRulesJavaHook { };
bazelRulesNodeJS5Hook = pkgs.callPackage ./bazelRulesNodeJS5Hook { };
}

View file

@ -22,7 +22,8 @@ let
listToAttrs
mapAttrs
toJSON
unsafeDiscardStringContext;
unsafeDiscardStringContext
;
inherit (pkgs) lib runCommand writeText;
inherit (depot.nix.readTree) mkLabel;
@ -33,24 +34,27 @@ rec {
# Create a unique key for the buildkite pipeline based on the given derivation
# or drvPath. A consequence of using such keys is that every derivation may
# only be exposed as a single, unique step in the pipeline.
keyForDrv = drvOrPath:
keyForDrv =
drvOrPath:
let
drvPath =
if lib.isDerivation drvOrPath then drvOrPath.drvPath
else if lib.isString drvOrPath then drvOrPath
else builtins.throw "keyForDrv: expected string or derivation";
if lib.isDerivation drvOrPath then
drvOrPath.drvPath
else if lib.isString drvOrPath then
drvOrPath
else
builtins.throw "keyForDrv: expected string or derivation";
# Only use the drv hash to prevent escaping problems. Buildkite also has a
# limit of 100 characters on keys.
in
"drv-" + (builtins.substring 0 32
(builtins.baseNameOf (unsafeDiscardStringContext drvPath))
);
"drv-" + (builtins.substring 0 32 (builtins.baseNameOf (unsafeDiscardStringContext drvPath)));
# Given an arbitrary attribute path generate a Nix expression which obtains
# this from the root of depot (assumed to be ./.). Attributes may be any
# Nix strings suitable as attribute names, not just Nix literal-safe strings.
mkBuildExpr = attrPath:
mkBuildExpr =
attrPath:
let
descend = expr: attr: "builtins.getAttr \"${attr}\" (${expr})";
in
@ -58,38 +62,49 @@ rec {
# Determine whether to skip a target if it has not diverged from the
# HEAD branch.
shouldSkip = { parentTargetMap ? { }, label, drvPath }:
if (hasAttr label parentTargetMap) && parentTargetMap."${label}".drvPath == drvPath
then "Target has not changed."
else false;
shouldSkip =
{
parentTargetMap ? { },
label,
drvPath,
}:
if (hasAttr label parentTargetMap) && parentTargetMap."${label}".drvPath == drvPath then
"Target has not changed."
else
false;
# Create build command for an attribute path pointing to a derivation.
mkBuildCommand = { attrPath, drvPath, outLink ? "result" }: concatStringsSep " " [
# If the nix build fails, the Nix command's exit status should be used.
"set -o pipefail;"
mkBuildCommand =
{
attrPath,
drvPath,
outLink ? "result",
}:
concatStringsSep " " [
# If the nix build fails, the Nix command's exit status should be used.
"set -o pipefail;"
# First try to realise the drvPath of the target so we don't evaluate twice.
# Nix has no concept of depending on a derivation file without depending on
# at least one of its `outPath`s, so we need to discard the string context
# if we don't want to build everything during pipeline construction.
#
# To make this more uniform with how nix-build(1) works, we call realpath(1)
# on nix-store(1)'s output since it has the habit of printing the path of the
# out link, not the store path.
"(nix-store --realise '${drvPath}' --add-root '${outLink}' --indirect | xargs -r realpath)"
# First try to realise the drvPath of the target so we don't evaluate twice.
# Nix has no concept of depending on a derivation file without depending on
# at least one of its `outPath`s, so we need to discard the string context
# if we don't want to build everything during pipeline construction.
#
# To make this more uniform with how nix-build(1) works, we call realpath(1)
# on nix-store(1)'s output since it has the habit of printing the path of the
# out link, not the store path.
"(nix-store --realise '${drvPath}' --add-root '${outLink}' --indirect | xargs -r realpath)"
# Since we don't gcroot the derivation files, they may be deleted by the
# garbage collector. In that case we can reevaluate and build the attribute
# using nix-build.
"|| (test ! -f '${drvPath}' && nix-build -E '${mkBuildExpr attrPath}' --show-trace --out-link '${outLink}')"
];
# Since we don't gcroot the derivation files, they may be deleted by the
# garbage collector. In that case we can reevaluate and build the attribute
# using nix-build.
"|| (test ! -f '${drvPath}' && nix-build -E '${mkBuildExpr attrPath}' --show-trace --out-link '${outLink}')"
];
# Attribute path of a target relative to the depot root. Needs to take into
# account whether the target is a physical target (which corresponds to a path
# in the filesystem) or the subtarget of a physical target.
targetAttrPath = target:
target.__readTree
++ lib.optionals (target ? __subtarget) [ target.__subtarget ];
targetAttrPath =
target: target.__readTree ++ lib.optionals (target ? __subtarget) [ target.__subtarget ];
# Given a derivation (identified by drvPath) that is part of the list of
# targets passed to mkPipeline, determine all derivations that it depends on
@ -97,11 +112,18 @@ rec {
# that build them. This is used to populate `depends_on` in `mkStep`.
#
# See //nix/dependency-analyzer for documentation on the structure of `targetDepMap`.
getTargetPipelineDeps = targetDepMap: drvPath:
builtins.map keyForDrv (targetDepMap.${drvPath}.knownDeps or [ ]);
getTargetPipelineDeps =
targetDepMap: drvPath: builtins.map keyForDrv (targetDepMap.${drvPath}.knownDeps or [ ]);
# Create a pipeline step from a single target.
mkStep = { headBranch, parentTargetMap, targetDepMap, target, cancelOnBuildFailing }:
mkStep =
{
headBranch,
parentTargetMap,
targetDepMap,
target,
cancelOnBuildFailing,
}:
let
label = mkLabel target;
drvPath = unsafeDiscardStringContext target.drvPath;
@ -120,24 +142,34 @@ rec {
# Add a dependency on the initial static pipeline step which
# always runs. This allows build steps uploaded in batches to
# start running before all batches have been uploaded.
depends_on = [ ":init:" ]
depends_on = [
":init:"
]
++ getTargetPipelineDeps targetDepMap drvPath
++ lib.optionals (target ? meta.ci.buildkiteExtraDeps) target.meta.ci.buildkiteExtraDeps;
} // lib.optionalAttrs (target ? meta.timeout) {
}
// lib.optionalAttrs (target ? meta.timeout) {
timeout_in_minutes = target.meta.timeout / 60;
# Additional arguments to set on the step.
# Keep in mind these *overwrite* existing step args, not extend. Use with caution.
} // lib.optionalAttrs (target ? meta.ci.buildkiteExtraStepArgs) target.meta.ci.buildkiteExtraStepArgs;
}
// lib.optionalAttrs (
target ? meta.ci.buildkiteExtraStepArgs
) target.meta.ci.buildkiteExtraStepArgs;
# Helper function to inelegantly divide a list into chunks of at
# most n elements.
#
# This works by assigning each element a chunk ID based on its
# index, and then grouping all elements by their chunk ID.
chunksOf = n: list:
chunksOf =
n: list:
let
chunkId = idx: toString (idx / n + 1);
assigned = lib.imap1 (idx: value: { inherit value; chunk = chunkId idx; }) list;
assigned = lib.imap1 (idx: value: {
inherit value;
chunk = chunkId idx;
}) list;
unchunk = mapAttrs (_: elements: map (e: e.value) elements);
in
unchunk (lib.groupBy (e: e.chunk) assigned);
@ -156,36 +188,35 @@ rec {
# are uploaded sequentially. This is because of a limitation in the
# Buildkite backend which struggles to process more than a specific
# number of chunks at once.
pipelineChunks = name: steps:
attrValues (mapAttrs (makePipelineChunk name) (chunksOf 192 steps));
pipelineChunks = name: steps: attrValues (mapAttrs (makePipelineChunk name) (chunksOf 192 steps));
# Create a pipeline structure for the given targets.
mkPipeline =
{
# HEAD branch of the repository on which release steps, GC
# anchoring and other "mainline only" steps should run.
headBranch
, # List of derivations as read by readTree (in most cases just the
headBranch,
# List of derivations as read by readTree (in most cases just the
# output of readTree.gather) that should be built in Buildkite.
#
# These are scheduled as the first build steps and run as fast as
# possible, in order, without any concurrency restrictions.
drvTargets
, # Derivation map of a parent commit. Only targets which no longer
drvTargets,
# Derivation map of a parent commit. Only targets which no longer
# correspond to the content of this map will be built. Passing an
# empty map will always build all targets.
parentTargetMap ? { }
, # A list of plain Buildkite step structures to run alongside the
parentTargetMap ? { },
# A list of plain Buildkite step structures to run alongside the
# build for all drvTargets, but before proceeding with any
# post-build actions such as status reporting.
#
# Can be used for things like code formatting checks.
additionalSteps ? [ ]
, # A list of plain Buildkite step structures to run after all
additionalSteps ? [ ],
# A list of plain Buildkite step structures to run after all
# previous steps succeeded.
#
# Can be used for status reporting steps and the like.
postBuildSteps ? [ ]
postBuildSteps ? [ ],
# The list of phases known by the current Buildkite
# pipeline. Dynamic pipeline chunks for each phase are uploaded
# to Buildkite on execution of static part of the
@ -199,7 +230,10 @@ rec {
# - "build" - main phase for building all Nix targets
# - "release" - pushing artifacts to external repositories
# - "deploy" - updating external deployment configurations
, phases ? [ "build" "release" ]
phases ? [
"build"
"release"
],
# Build phases that are active for this invocation (i.e. their
# steps should be generated).
#
@ -208,13 +242,13 @@ rec {
# eval contexts.
#
# TODO(tazjin): Fail/warn if unknown phase is requested.
, activePhases ? phases
activePhases ? phases,
# Setting this attribute to true cancels dynamic pipeline steps
# as soon as the build is marked as failing.
#
# To enable this feature one should enable "Fail Fast" setting
# at Buildkite pipeline or on organization level.
, cancelOnBuildFailing ? false
cancelOnBuildFailing ? false,
}:
let
# List of phases to include.
@ -232,20 +266,25 @@ rec {
# the previous pipeline (per parentTargetMap). Unchanged targets will
# be skipped (assumed already built), so it's useless to emit deps
# on their steps.
changedDrvTargets = builtins.filter
(target:
parentTargetMap.${mkLabel target}.drvPath or null != target.drvPath
)
drvTargets;
changedDrvTargets = builtins.filter (
target: parentTargetMap.${mkLabel target}.drvPath or null != target.drvPath
) drvTargets;
in
dependency-analyzer (dependency-analyzer.drvsToPaths changedDrvTargets);
# Convert a target into all of its steps, separated by build
# phase (as phases end up in different chunks).
targetToSteps = target:
targetToSteps =
target:
let
mkStepArgs = {
inherit headBranch parentTargetMap targetDepMap target cancelOnBuildFailing;
inherit
headBranch
parentTargetMap
targetDepMap
target
cancelOnBuildFailing
;
};
step = mkStep mkStepArgs;
@ -257,19 +296,21 @@ rec {
overridable = f: mkStep (mkStepArgs // { target = (f target); });
# Split extra steps by phase.
splitExtraSteps = lib.groupBy ({ phase, ... }: phase)
(attrValues (mapAttrs (normaliseExtraStep phases overridable)
(target.meta.ci.extraSteps or { })));
splitExtraSteps = lib.groupBy ({ phase, ... }: phase) (
attrValues (mapAttrs (normaliseExtraStep phases overridable) (target.meta.ci.extraSteps or { }))
);
extraSteps = mapAttrs
(_: steps:
map (mkExtraStep (targetAttrPath target) buildEnabled) steps)
splitExtraSteps;
extraSteps = mapAttrs (
_: steps: map (mkExtraStep (targetAttrPath target) buildEnabled) steps
) splitExtraSteps;
in
if !buildEnabled then extraSteps
else extraSteps // {
build = [ step ] ++ (extraSteps.build or [ ]);
};
if !buildEnabled then
extraSteps
else
extraSteps
// {
build = [ step ] ++ (extraSteps.build or [ ]);
};
# Combine all target steps into step lists per phase.
#
@ -279,44 +320,47 @@ rec {
release = postBuildSteps;
};
phasesWithSteps = lib.zipAttrsWithNames enabledPhases (_: concatLists)
((map targetToSteps drvTargets) ++ [ globalSteps ]);
phasesWithSteps = lib.zipAttrsWithNames enabledPhases (_: concatLists) (
(map targetToSteps drvTargets) ++ [ globalSteps ]
);
# Generate pipeline chunks for each phase.
chunks = foldl'
(acc: phase:
let phaseSteps = phasesWithSteps.${phase} or [ ]; in
if phaseSteps == [ ]
then acc
else acc ++ (pipelineChunks phase phaseSteps))
[ ]
enabledPhases;
chunks = foldl' (
acc: phase:
let
phaseSteps = phasesWithSteps.${phase} or [ ];
in
if phaseSteps == [ ] then acc else acc ++ (pipelineChunks phase phaseSteps)
) [ ] enabledPhases;
in
runCommand "buildkite-pipeline" { } ''
mkdir $out
echo "Generated ${toString (length chunks)} pipeline chunks"
${
lib.concatMapStringsSep "\n"
(chunk: "cp ${chunk.path} $out/${chunk.filename}") chunks
}
${lib.concatMapStringsSep "\n" (chunk: "cp ${chunk.path} $out/${chunk.filename}") chunks}
'';
# Create a drvmap structure for the given targets, containing the
# mapping of all target paths to their derivations. The mapping can
# be persisted for future use.
mkDrvmap = drvTargets: writeText "drvmap.json" (toJSON (listToAttrs (map
(target: {
name = mkLabel target;
value = {
drvPath = unsafeDiscardStringContext target.drvPath;
mkDrvmap =
drvTargets:
writeText "drvmap.json" (
toJSON (
listToAttrs (
map (target: {
name = mkLabel target;
value = {
drvPath = unsafeDiscardStringContext target.drvPath;
# Include the attrPath in the output to reconstruct the drv
# without parsing the human-readable label.
attrPath = targetAttrPath target;
};
})
drvTargets)));
# Include the attrPath in the output to reconstruct the drv
# without parsing the human-readable label.
attrPath = targetAttrPath target;
};
}) drvTargets
)
)
);
# Implementation of extra step logic.
#
@ -356,40 +400,49 @@ rec {
# Create a gated step in a step group, independent from any other
# steps.
mkGatedStep = { step, label, parent, prompt }: {
inherit (step) depends_on;
group = label;
skip = parent.skip or false;
mkGatedStep =
{
step,
label,
parent,
prompt,
}:
{
inherit (step) depends_on;
group = label;
skip = parent.skip or false;
steps = [
{
inherit prompt;
branches = step.branches or [ ];
block = ":radio_button: Run ${label}? (from ${parent.env.READTREE_TARGET})";
}
steps = [
{
inherit prompt;
branches = step.branches or [ ];
block = ":radio_button: Run ${label}? (from ${parent.env.READTREE_TARGET})";
}
# The explicit depends_on of the wrapped step must be removed,
# otherwise its dependency relationship with the gate step will
# break.
(builtins.removeAttrs step [ "depends_on" ])
];
};
# The explicit depends_on of the wrapped step must be removed,
# otherwise its dependency relationship with the gate step will
# break.
(builtins.removeAttrs step [ "depends_on" ])
];
};
# Validate and normalise extra step configuration before actually
# generating build steps, in order to use user-provided metadata
# during the pipeline generation.
normaliseExtraStep = phases: overridableParent: key:
{ command
, label ? key
, needsOutput ? false
, parentOverride ? (x: x)
, branches ? null
, alwaysRun ? false
, prompt ? false
, softFail ? false
, phase ? "build"
, skip ? false
, agents ? null
normaliseExtraStep =
phases: overridableParent: key:
{
command,
label ? key,
needsOutput ? false,
parentOverride ? (x: x),
branches ? null,
alwaysRun ? false,
prompt ? false,
softFail ? false,
phase ? "build",
skip ? false,
agents ? null,
}:
let
parent = overridableParent parentOverride;
@ -401,8 +454,7 @@ rec {
Phase '${phase}' is not valid.
Known phases: ${concatStringsSep ", " phases}
''
phase;
'' phase;
in
{
inherit
@ -416,7 +468,8 @@ rec {
parentLabel
softFail
skip
agents;
agents
;
phase = validPhase;
@ -426,13 +479,13 @@ rec {
The 'prompt' feature can not be used by steps in the "build"
phase, because CI builds should not be gated on manual human
approvals.
''
prompt;
'' prompt;
};
# Create the Buildkite configuration for an extra step, optionally
# wrapping it in a gate group.
mkExtraStep = parentAttrPath: buildEnabled: cfg:
mkExtraStep =
parentAttrPath: buildEnabled: cfg:
let
# ATTN: needs to match an entry in .gitignore so that the tree won't get dirty
commandScriptLink = "nix-buildkite-extra-step-command-script";
@ -453,15 +506,11 @@ rec {
in
if cfg.alwaysRun then false else skip';
depends_on = lib.optional
(buildEnabled && !cfg.alwaysRun && !cfg.needsOutput)
cfg.parent.key;
depends_on = lib.optional (buildEnabled && !cfg.alwaysRun && !cfg.needsOutput) cfg.parent.key;
command = ''
set -ueo pipefail
${lib.optionalString cfg.needsOutput
"echo '~~~ Preparing build output of ${cfg.parentLabel}'"
}
${lib.optionalString cfg.needsOutput "echo '~~~ Preparing build output of ${cfg.parentLabel}'"}
${lib.optionalString cfg.needsOutput cfg.parent.command}
echo '--- Building extra step script'
command_script="$(${
@ -469,9 +518,13 @@ rec {
assert builtins.length cfg.command.outputs == 1;
mkBuildCommand {
# script is exposed at <parent>.meta.ci.extraSteps.<key>.command
attrPath =
parentAttrPath
++ [ "meta" "ci" "extraSteps" cfg.key "command" ];
attrPath = parentAttrPath ++ [
"meta"
"ci"
"extraSteps"
cfg.key
"command"
];
drvPath = unsafeDiscardStringContext cfg.command.drvPath;
# make sure it doesn't conflict with result (from needsOutput)
outLink = commandScriptLink;
@ -483,17 +536,17 @@ rec {
'';
soft_fail = cfg.softFail;
} // (lib.optionalAttrs (cfg.agents != null) { inherit (cfg) agents; })
}
// (lib.optionalAttrs (cfg.agents != null) { inherit (cfg) agents; })
// (lib.optionalAttrs (cfg.branches != null) {
branches = lib.concatStringsSep " " cfg.branches;
});
in
if (isString cfg.prompt)
then
mkGatedStep
{
inherit step;
inherit (cfg) label parent prompt;
}
else step;
if (isString cfg.prompt) then
mkGatedStep {
inherit step;
inherit (cfg) label parent prompt;
}
else
step;
}

View file

@ -1,4 +1,9 @@
{ lib, depot, pkgs, ... }:
{
lib,
depot,
pkgs,
...
}:
let
inherit (builtins) unsafeDiscardStringContext appendContext;
@ -18,23 +23,22 @@ let
directDrvDeps =
let
getDeps =
if lib.versionAtLeast builtins.nixVersion "2.6"
then
# Since https://github.com/NixOS/nix/pull/1643, Nix apparently »preserves
# string context« through a readFile invocation. This has the side effect
# that it becomes possible to query the actual references a store path has.
# Not a 100% sure this is intended, but _very_ convenient for us here.
drvPath:
builtins.attrNames (builtins.getContext (builtins.readFile drvPath))
if lib.versionAtLeast builtins.nixVersion "2.6" then
# Since https://github.com/NixOS/nix/pull/1643, Nix apparently »preserves
# string context« through a readFile invocation. This has the side effect
# that it becomes possible to query the actual references a store path has.
# Not a 100% sure this is intended, but _very_ convenient for us here.
drvPath: builtins.attrNames (builtins.getContext (builtins.readFile drvPath))
else
# For Nix < 2.6 we have to rely on HACK, namely grepping for quoted
# store path references in the file. In the future this should be
# replaced by a proper derivation parser.
drvPath: builtins.concatLists (
# For Nix < 2.6 we have to rely on HACK, namely grepping for quoted
# store path references in the file. In the future this should be
# replaced by a proper derivation parser.
drvPath:
builtins.concatLists (
builtins.filter builtins.isList (
builtins.split
"\"(${lib.escapeRegex builtins.storeDir}/[[:alnum:]+._?=-]+.drv)\""
(builtins.readFile drvPath)
builtins.split "\"(${lib.escapeRegex builtins.storeDir}/[[:alnum:]+._?=-]+.drv)\"" (
builtins.readFile drvPath
)
)
);
in
@ -42,15 +46,12 @@ let
# if the passed path is not a derivation we can't necessarily get its
# dependencies, since it may not be representable as a Nix string due to
# NUL bytes, e.g. compressed patch files imported into the Nix store.
if builtins.match "^.+\\.drv$" drvPath == null
then [ ]
else getDeps drvPath;
if builtins.match "^.+\\.drv$" drvPath == null then [ ] else getDeps drvPath;
# Maps a list of derivation to the list of corresponding `drvPath`s.
#
# Type: [drv] -> [str]
drvsToPaths = drvs:
builtins.map (drv: builtins.unsafeDiscardOutputDependency drv.drvPath) drvs;
drvsToPaths = drvs: builtins.map (drv: builtins.unsafeDiscardOutputDependency drv.drvPath) drvs;
#
# Calculate map of direct derivation dependencies
@ -62,7 +63,8 @@ let
# generating the map from
#
# Type: bool -> string -> set
drvEntry = known: drvPath:
drvEntry =
known: drvPath:
let
# key may not refer to a store path, …
key = unsafeDiscardStringContext drvPath;
@ -85,7 +87,8 @@ let
# attribute to `true` if it is in the list of input derivation paths.
#
# Type: [str] -> set
plainDrvDepMap = drvPaths:
plainDrvDepMap =
drvPaths:
builtins.listToAttrs (
builtins.genericClosure {
startSet = builtins.map (drvEntry true) drvPaths;
@ -121,13 +124,15 @@ let
# `fmap (builtins.getAttr "knownDeps") (getAttr drvPath)` will always succeed.
#
# Type: str -> stateMonad drvDepMap null
insertKnownDeps = drvPathWithContext:
insertKnownDeps =
drvPathWithContext:
let
# We no longer need to read from the store, so context is irrelevant, but
# we need to check for attr names which requires the absence of context.
drvPath = unsafeDiscardStringContext drvPathWithContext;
in
bind get (initDepMap:
bind get (
initDepMap:
# Get the dependency map's state before we've done anything to obtain the
# entry we'll be manipulating later as well as its dependencies.
let
@ -135,57 +140,48 @@ let
# We don't need to recurse if our direct dependencies either have their
# knownDeps list already populated or are known dependencies themselves.
depsPrecalculated =
builtins.partition
(dep:
initDepMap.${dep}.known
|| initDepMap.${dep} ? knownDeps
)
entryPoint.deps;
depsPrecalculated = builtins.partition (
dep: initDepMap.${dep}.known || initDepMap.${dep} ? knownDeps
) entryPoint.deps;
# If a direct dependency is known, it goes right to our known dependency
# list. If it is unknown, we can copy its knownDeps list into our own.
initiallyKnownDeps =
builtins.concatLists (
builtins.map
(dep:
if initDepMap.${dep}.known
then [ dep ]
else initDepMap.${dep}.knownDeps
)
depsPrecalculated.right
);
initiallyKnownDeps = builtins.concatLists (
builtins.map (
dep: if initDepMap.${dep}.known then [ dep ] else initDepMap.${dep}.knownDeps
) depsPrecalculated.right
);
in
# If the information was already calculated before, we can exit right away
if entryPoint ? knownDeps
then pure null
if entryPoint ? knownDeps then
pure null
else
after
# For all unknown direct dependencies which don't have a `knownDeps`
# list, we call ourselves recursively to populate it. Since this is
# done sequentially in the state monad, we avoid recalculating the
# list for the same derivation multiple times.
(for_
depsPrecalculated.wrong
insertKnownDeps)
(for_ depsPrecalculated.wrong insertKnownDeps)
# After this we can obtain the updated dependency map which will have
# a `knownDeps` list for all our direct dependencies and update the
# entry for the input `drvPath`.
(bind
get
(populatedDepMap:
(setAttr drvPath (entryPoint // {
knownDeps =
lib.unique (
(
bind get (
populatedDepMap:
(setAttr drvPath (
entryPoint
// {
knownDeps = lib.unique (
initiallyKnownDeps
++ builtins.concatLists (
builtins.map
(dep: populatedDepMap.${dep}.knownDeps)
depsPrecalculated.wrong
++ builtins.concatLists (
builtins.map (dep: populatedDepMap.${dep}.knownDeps) depsPrecalculated.wrong
)
);
}))))
}
))
)
)
);
# This function puts it all together and is exposed via `__functor`.
@ -204,14 +200,8 @@ let
# */
# ];
# }
knownDrvDepMap = knownDrvPaths:
run
(plainDrvDepMap knownDrvPaths)
(after
(for_
knownDrvPaths
insertKnownDeps)
get);
knownDrvDepMap =
knownDrvPaths: run (plainDrvDepMap knownDrvPaths) (after (for_ knownDrvPaths insertKnownDeps) get);
#
# Other things based on knownDrvDepMap
@ -221,39 +211,39 @@ let
# name, so multiple entries can be collapsed if they have the same name.
#
# Type: [drv] -> drv
knownDependencyGraph = name: drvs:
knownDependencyGraph =
name: drvs:
let
justName = drvPath:
builtins.substring
(builtins.stringLength builtins.storeDir + 1 + 32 + 1)
(builtins.stringLength drvPath)
(unsafeDiscardStringContext drvPath);
justName =
drvPath:
builtins.substring (
builtins.stringLength builtins.storeDir + 1 + 32 + 1
) (builtins.stringLength drvPath) (unsafeDiscardStringContext drvPath);
gv = pkgs.writeText "${name}-dependency-analysis.gv" ''
digraph depot {
${
(lib.concatStringsSep "\n"
(lib.mapAttrsToList (name: value:
if !value.known then ""
else lib.concatMapStringsSep "\n"
(knownDep: " \"${justName name}\" -> \"${justName knownDep}\"")
value.knownDeps
)
(depot.nix.dependency-analyzer (
drvsToPaths drvs
))))
(lib.concatStringsSep "\n" (
lib.mapAttrsToList (
name: value:
if !value.known then
""
else
lib.concatMapStringsSep "\n" (
knownDep: " \"${justName name}\" -> \"${justName knownDep}\""
) value.knownDeps
) (depot.nix.dependency-analyzer (drvsToPaths drvs))
))
}
}
'';
in
pkgs.runCommand "${name}-dependency-analysis.svg"
{
nativeBuildInputs = [
pkgs.buildPackages.graphviz
];
}
"dot -Tsvg < ${gv} > $out";
pkgs.runCommand "${name}-dependency-analysis.svg" {
nativeBuildInputs = [
pkgs.buildPackages.graphviz
];
} "dot -Tsvg < ${gv} > $out";
in
{

View file

@ -4,8 +4,8 @@ let
# e.g.
# a"b\c -> "a\"b\\c"
# a\"bc -> "a\\\"bc"
escapeExeclineArg = arg:
''"${builtins.replaceStrings [ ''"'' ''\'' ] [ ''\"'' ''\\'' ] (toString arg)}"'';
escapeExeclineArg =
arg: ''"${builtins.replaceStrings [ ''"'' ''\'' ] [ ''\"'' ''\\'' ] (toString arg)}"'';
# Escapes an execline (list of execline strings) to be passed to execlineb
# Give it a nested list of strings. Nested lists are interpolated as execline
@ -15,15 +15,24 @@ let
# Example:
# escapeExecline [ "if" [ "somecommand" ] "true" ]
# == ''"if" { "somecommand" } "true"''
escapeExecline = execlineList: lib.concatStringsSep " "
(
escapeExecline =
execlineList:
lib.concatStringsSep " " (
let
go = arg:
if builtins.isString arg then [ (escapeExeclineArg arg) ]
else if builtins.isPath arg then [ (escapeExeclineArg "${arg}") ]
else if lib.isDerivation arg then [ (escapeExeclineArg arg) ]
else if builtins.isList arg then [ "{" ] ++ builtins.concatMap go arg ++ [ "}" ]
else abort "escapeExecline can only hande nested lists of strings, was ${lib.generators.toPretty {} arg}";
go =
arg:
if builtins.isString arg then
[ (escapeExeclineArg arg) ]
else if builtins.isPath arg then
[ (escapeExeclineArg "${arg}") ]
else if lib.isDerivation arg then
[ (escapeExeclineArg arg) ]
else if builtins.isList arg then
[ "{" ] ++ builtins.concatMap go arg ++ [ "}" ]
else
abort "escapeExecline can only hande nested lists of strings, was ${
lib.generators.toPretty { } arg
}";
in
builtins.concatMap go execlineList
);

View file

@ -17,75 +17,82 @@ let
# Create the case statement for a command invocations, optionally
# overriding the `TARGET_TOOL` variable.
invoke = name: { attr, cmd ? null }: ''
${name})
attr="${attr}"
${if cmd != null then "TARGET_TOOL=\"${cmd}\"\n;;" else ";;"}
'';
invoke =
name:
{
attr,
cmd ? null,
}:
''
${name})
attr="${attr}"
${if cmd != null then "TARGET_TOOL=\"${cmd}\"\n;;" else ";;"}
'';
# Create command to symlink to the dispatch script for each tool.
link = name: "ln -s $target $out/bin/${name}";
invocations = tools: concatStringsSep "\n" (attrValues (mapAttrs invoke tools));
in
fix (self:
fix (
self:
# Attribute set of tools that should be lazily-added to the $PATH.
#
# The name of each attribute is used as the command name (on $PATH).
# It must contain the keys 'attr' (containing the Nix attribute path
# to the tool's derivation from the top-level), and may optionally
# contain the key 'cmd' to override the name of the binary inside the
# derivation.
tools:
# Attribute set of tools that should be lazily-added to the $PATH.
#
# The name of each attribute is used as the command name (on $PATH).
# It must contain the keys 'attr' (containing the Nix attribute path
# to the tool's derivation from the top-level), and may optionally
# contain the key 'cmd' to override the name of the binary inside the
# derivation.
tools:
pkgs.runCommandNoCC "lazy-dispatch"
{
passthru.overrideDeps = newTools: self (tools // newTools);
passthru.tools = tools;
pkgs.runCommandNoCC "lazy-dispatch"
{
passthru.overrideDeps = newTools: self (tools // newTools);
passthru.tools = tools;
text = ''
#!${pkgs.runtimeShell}
set -ue
text = ''
#!${pkgs.runtimeShell}
set -ue
if ! type git>/dev/null || ! type nix-build>/dev/null; then
echo "The 'git' and 'nix-build' commands must be available." >&2
exit 127
fi
if ! type git>/dev/null || ! type nix-build>/dev/null; then
echo "The 'git' and 'nix-build' commands must be available." >&2
exit 127
fi
readonly REPO_ROOT=$(git rev-parse --show-toplevel)
TARGET_TOOL=$(basename "$0")
readonly REPO_ROOT=$(git rev-parse --show-toplevel)
TARGET_TOOL=$(basename "$0")
case "''${TARGET_TOOL}" in
${invocations tools}
*)
echo "''${TARGET_TOOL} is currently not installed in this repository." >&2
exit 127
;;
esac
case "''${TARGET_TOOL}" in
${invocations tools}
*)
echo "''${TARGET_TOOL} is currently not installed in this repository." >&2
exit 127
;;
esac
result=$(nix-build --no-out-link --attr "''${attr}" "''${REPO_ROOT}")
PATH="''${result}/bin:$PATH"
exec "''${TARGET_TOOL}" "''${@}"
'';
result=$(nix-build --no-out-link --attr "''${attr}" "''${REPO_ROOT}")
PATH="''${result}/bin:$PATH"
exec "''${TARGET_TOOL}" "''${@}"
'';
# Access this to get a compatible nix-shell
passthru.devShell = pkgs.mkShellNoCC {
name = "${self.name}-shell";
packages = [ self ];
};
}
''
# Write the dispatch code
target=$out/bin/__dispatch
mkdir -p "$(dirname "$target")"
echo "$text" > $target
chmod +x $target
# Access this to get a compatible nix-shell
passthru.devShell = pkgs.mkShellNoCC {
name = "${self.name}-shell";
packages = [ self ];
};
}
''
# Write the dispatch code
target=$out/bin/__dispatch
mkdir -p "$(dirname "$target")"
echo "$text" > $target
chmod +x $target
# Add symlinks from all the tools to the dispatch
${concatStringsSep "\n" (map link (attrNames tools))}
# Add symlinks from all the tools to the dispatch
${concatStringsSep "\n" (map link (attrNames tools))}
# Check that it's working-ish
${pkgs.stdenv.shellDryRun} $target
''
# Check that it's working-ish
${pkgs.stdenv.shellDryRun} $target
''
)

View file

@ -32,25 +32,30 @@ let
map
match
readDir
substring;
substring
;
argsWithPath = args: parts:
let meta.locatedAt = parts;
in meta // (if isAttrs args then args else args meta);
argsWithPath =
args: parts:
let
meta.locatedAt = parts;
in
meta // (if isAttrs args then args else args meta);
readDirVisible = path:
readDirVisible =
path:
let
children = readDir path;
# skip hidden files, except for those that contain special instructions to readTree
isVisible = f: f == ".skip-subtree" || f == ".skip-tree" || (substring 0 1 f) != ".";
names = filter isVisible (attrNames children);
in
listToAttrs (map
(name: {
listToAttrs (
map (name: {
inherit name;
value = children.${name};
})
names);
}) names
);
# Create a mark containing the location of this attribute and
# a list of all child attribute names added by readTree.
@ -60,39 +65,48 @@ let
};
# Create a label from a target's tree location.
mkLabel = target:
let label = concatStringsSep "/" target.__readTree;
in if target ? __subtarget
then "${label}:${target.__subtarget}"
else label;
mkLabel =
target:
let
label = concatStringsSep "/" target.__readTree;
in
if target ? __subtarget then "${label}:${target.__subtarget}" else label;
# Merge two attribute sets, but place attributes in `passthru` via
# `overrideAttrs` for derivation targets that support it.
merge = a: b:
if a ? overrideAttrs
then
a.overrideAttrs
(prev: {
passthru = (prev.passthru or { }) // b;
})
else a // b;
merge =
a: b:
if a ? overrideAttrs then
a.overrideAttrs (prev: {
passthru = (prev.passthru or { }) // b;
})
else
a // b;
# Import a file and enforce our calling convention
importFile = args: scopedArgs: path: parts: filter:
importFile =
args: scopedArgs: path: parts: filter:
let
importedFile =
if scopedArgs != { } && builtins ? scopedImport # For snix
then builtins.scopedImport scopedArgs path
else import path;
if
scopedArgs != { } && builtins ? scopedImport # For snix
then
builtins.scopedImport scopedArgs path
else
import path;
pathType = builtins.typeOf importedFile;
in
if pathType != "lambda"
then throw "readTree: trying to import ${toString path}, but its a ${pathType}, you need to make it a function like { depot, pkgs, ... }"
else importedFile (filter parts (argsWithPath args parts));
if pathType != "lambda" then
throw "readTree: trying to import ${toString path}, but its a ${pathType}, you need to make it a function like { depot, pkgs, ... }"
else
importedFile (filter parts (argsWithPath args parts));
nixFileName = file:
let res = match "(.*)\\.nix" file;
in if res == null then null else head res;
nixFileName =
file:
let
res = match "(.*)\\.nix" file;
in
if res == null then null else head res;
# Internal implementation of readTree, which handles things like the
# skipping of trees and subtrees.
@ -105,7 +119,15 @@ let
# The higher-level `readTree` method assembles the final attribute
# set out of these results at the top-level, and the internal
# `children` implementation unwraps and processes nested trees.
readTreeImpl = { args, initPath, rootDir, parts, argsFilter, scopedArgs }:
readTreeImpl =
{
args,
initPath,
rootDir,
parts,
argsFilter,
scopedArgs,
}:
let
dir = readDirVisible initPath;
@ -123,9 +145,10 @@ let
joinChild = c: initPath + ("/" + c);
self =
if rootDir
then { __readTree = [ ]; }
else importFile (args // { here = result; }) scopedArgs initPath parts argsFilter;
if rootDir then
{ __readTree = [ ]; }
else
importFile (args // { here = result; }) scopedArgs initPath parts argsFilter;
# Import subdirectories of the current one, unless any skip
# instructions exist.
@ -134,88 +157,93 @@ let
# should be ignored, but its content is not inspected by
# readTree
filterDir = f: dir."${f}" == "directory";
filteredChildren = map
(c: {
name = c;
value = readTreeImpl {
inherit argsFilter scopedArgs;
args = args;
initPath = (joinChild c);
rootDir = false;
parts = (parts ++ [ c ]);
};
})
(filter filterDir (attrNames dir));
filteredChildren = map (c: {
name = c;
value = readTreeImpl {
inherit argsFilter scopedArgs;
args = args;
initPath = (joinChild c);
rootDir = false;
parts = (parts ++ [ c ]);
};
}) (filter filterDir (attrNames dir));
# Remove skipped children from the final set, and unwrap the
# result set.
children =
if skipSubtree then [ ]
else map ({ name, value }: { inherit name; value = value.ok; }) (filter (child: child.value ? ok) filteredChildren);
if skipSubtree then
[ ]
else
map (
{ name, value }:
{
inherit name;
value = value.ok;
}
) (filter (child: child.value ? ok) filteredChildren);
# Import Nix files
nixFiles =
if skipSubtree then [ ]
else filter (f: f != null) (map nixFileName (attrNames dir));
nixChildren = map
(c:
let
p = joinChild (c + ".nix");
childParts = parts ++ [ c ];
imported = importFile (args // { here = result; }) scopedArgs p childParts argsFilter;
in
{
name = c;
value =
if isAttrs imported
then merge imported (marker childParts { })
else imported;
})
nixFiles;
nixFiles = if skipSubtree then [ ] else filter (f: f != null) (map nixFileName (attrNames dir));
nixChildren = map (
c:
let
p = joinChild (c + ".nix");
childParts = parts ++ [ c ];
imported = importFile (args // { here = result; }) scopedArgs p childParts argsFilter;
in
{
name = c;
value = if isAttrs imported then merge imported (marker childParts { }) else imported;
}
) nixFiles;
nodeValue = if dir ? "default.nix" then self else { };
allChildren = listToAttrs (
if dir ? "default.nix"
then children
else nixChildren ++ children
);
allChildren = listToAttrs (if dir ? "default.nix" then children else nixChildren ++ children);
result =
if isAttrs nodeValue
then merge nodeValue (allChildren // (marker parts allChildren))
else nodeValue;
if isAttrs nodeValue then
merge nodeValue (allChildren // (marker parts allChildren))
else
nodeValue;
in
if skipTree
then { skip = true; }
else {
ok = result;
};
if skipTree then
{ skip = true; }
else
{
ok = result;
};
# Top-level implementation of readTree itself.
readTree = args:
readTree =
args:
let
tree = readTreeImpl args;
in
if tree ? skip
then throw "Top-level folder has a .skip-tree marker and could not be read by readTree!"
else tree.ok;
if tree ? skip then
throw "Top-level folder has a .skip-tree marker and could not be read by readTree!"
else
tree.ok;
# Helper function to fetch subtargets from a target. This is a
# temporary helper to warn on the use of the `meta.targets`
# attribute, which is deprecated in favour of `meta.ci.targets`.
subtargets = node:
let targets = (node.meta.targets or [ ]) ++ (node.meta.ci.targets or [ ]);
in if node ? meta.targets then
subtargets =
node:
let
targets = (node.meta.targets or [ ]) ++ (node.meta.ci.targets or [ ]);
in
if node ? meta.targets then
builtins.trace ''
Warning: The meta.targets attribute is deprecated.
Please move the subtargets of //${mkLabel node} to the
meta.ci.targets attribute.

''
targets else targets;
'' targets
else
targets;
# Function which can be used to find all readTree targets within an
# attribute set.
@ -231,23 +259,29 @@ let
#
# eligible: Function to determine whether the given derivation
# should be included in the build.
gather = eligible: node:
gather =
eligible: node:
if node ? __readTree then
# Include the node itself if it is eligible.
# Include the node itself if it is eligible.
(if eligible node then [ node ] else [ ])
# Include eligible children of the node
++ concatMap (gather eligible) (map (attr: node."${attr}") node.__readTreeChildren)
# Include specified sub-targets of the node
++ filter eligible (map
(k: (node."${k}" or { }) // {
# Keep the same tree location, but explicitly mark this
# node as a subtarget.
__readTree = node.__readTree;
__readTreeChildren = [ ];
__subtarget = k;
})
(subtargets node))
else [ ];
++ filter eligible (
map (
k:
(node."${k}" or { })
// {
# Keep the same tree location, but explicitly mark this
# node as a subtarget.
__readTree = node.__readTree;
__readTreeChildren = [ ];
__subtarget = k;
}
) (subtargets node)
)
else
[ ];
# Determine whether a given value is a derivation.
# Copied from nixpkgs/lib for cases where lib is not available yet.
@ -256,12 +290,14 @@ in
{
inherit gather mkLabel;
__functor = _:
{ path
, args
, filter ? (_parts: x: x)
, scopedArgs ? { }
, rootDir ? true
__functor =
_:
{
path,
args,
filter ? (_parts: x: x),
scopedArgs ? { },
rootDir ? true,
}:
readTree {
inherit args scopedArgs rootDir;
@ -285,43 +321,56 @@ in
# which should be able to access the restricted folder.
#
# reason: Textual explanation for the restriction (included in errors)
restrictFolder = { folder, exceptions ? [ ], reason }: parts: args:
if (elemAt parts 0) == folder || elem parts exceptions
then args
else args // {
depot = args.depot // {
"${folder}" = throw ''
Access to targets under //${folder} is not permitted from
other repository paths. Specific exceptions are configured
at the top-level.
restrictFolder =
{
folder,
exceptions ? [ ],
reason,
}:
parts: args:
if (elemAt parts 0) == folder || elem parts exceptions then
args
else
args
// {
depot = args.depot // {
"${folder}" = throw ''
Access to targets under //${folder} is not permitted from
other repository paths. Specific exceptions are configured
at the top-level.
${reason}
At location: ${builtins.concatStringsSep "." parts}
'';
${reason}
At location: ${builtins.concatStringsSep "." parts}
'';
};
};
};
# This definition of fix is identical to <nixpkgs>.lib.fix, but is
# provided here for cases where readTree is used before nixpkgs can
# be imported.
#
# It is often required to create the args attribute set.
fix = f: let x = f x; in x;
fix =
f:
let
x = f x;
in
x;
# Takes an attribute set and adds a meta.ci.targets attribute to it
# which contains all direct children of the attribute set which are
# derivations.
#
# Type: attrs -> attrs
drvTargets = attrs:
attrs // {
drvTargets =
attrs:
attrs
// {
# preserve .meta from original attrs
meta = (attrs.meta or { }) // {
# preserve .meta.ci (except .targets) from original attrs
ci = (attrs.meta.ci or { }) // {
targets = builtins.filter
(x: isDerivation attrs."${x}")
(builtins.attrNames attrs);
targets = builtins.filter (x: isDerivation attrs."${x}") (builtins.attrNames attrs);
};
};
};

View file

@ -1,18 +1,26 @@
{ depot, lib, pkgs, ... }:
{
depot,
lib,
pkgs,
...
}:
let
inherit (lib) partition optionalAttrs any;
inherit (builtins) tryEval;
it = msg: asserts:
it =
msg: asserts:
let
results = partition (a: a.ok) asserts;
in
{
_it = msg;
} // optionalAttrs (results.right != [ ]) {
}
// optionalAttrs (results.right != [ ]) {
passes = map (result: result.test) results.right;
} // optionalAttrs (results.wrong != [ ]) {
}
// optionalAttrs (results.wrong != [ ]) {
fails = map (result: result.test) results.wrong;
};
@ -21,16 +29,18 @@ let
ok = a == b;
};
assertThrows = test: value:
assertThrows =
test: value:
let
value' = tryEval value;
in
{
inherit test;
ok = ! value'.success;
ok = !value'.success;
};
runTestsuite = name: its:
runTestsuite =
name: its:
let
fails = any (it': it' ? fails) its;
in
@ -42,11 +52,14 @@ let
inherit its;
}
(
if fails then ''
jq '.its' < .attrs.json
'' else ''
jq '.its' < .attrs.json > $out
''
if fails then
''
jq '.its' < .attrs.json
''
else
''
jq '.its' < .attrs.json > $out
''
);
tree-ex = depot.nix.readTree {
@ -55,25 +68,14 @@ let
};
example = it "corresponds to the README example" [
(assertEq "third_party attrset"
(lib.isAttrs tree-ex.third_party
&& (! lib.isDerivation tree-ex.third_party))
true)
(assertEq "third_party attrset other attribute"
tree-ex.third_party.favouriteColour
"orange")
(assertEq "rustpkgs attrset aho-corasick"
tree-ex.third_party.rustpkgs.aho-corasick
"aho-corasick")
(assertEq "rustpkgs attrset serde"
tree-ex.third_party.rustpkgs.serde
"serde")
(assertEq "tools cheddear"
"cheddar"
tree-ex.tools.cheddar)
(assertEq "tools roquefort"
tree-ex.tools.roquefort
"roquefort")
(assertEq "third_party attrset" (
lib.isAttrs tree-ex.third_party && (!lib.isDerivation tree-ex.third_party)
) true)
(assertEq "third_party attrset other attribute" tree-ex.third_party.favouriteColour "orange")
(assertEq "rustpkgs attrset aho-corasick" tree-ex.third_party.rustpkgs.aho-corasick "aho-corasick")
(assertEq "rustpkgs attrset serde" tree-ex.third_party.rustpkgs.serde "serde")
(assertEq "tools cheddear" "cheddar" tree-ex.tools.cheddar)
(assertEq "tools roquefort" tree-ex.tools.roquefort "roquefort")
];
tree-tl = depot.nix.readTree {
@ -82,65 +84,64 @@ let
};
traversal-logic = it "corresponds to the traversal logic in the README" [
(assertEq "skip-tree/a is read"
tree-tl.skip-tree.a
"a is read normally")
(assertEq "skip-tree does not contain b"
(builtins.attrNames tree-tl.skip-tree)
[ "__readTree" "__readTreeChildren" "a" ])
(assertEq "skip-tree children list does not contain b"
tree-tl.skip-tree.__readTreeChildren
[ "a" ])
(assertEq "skip-tree/a is read" tree-tl.skip-tree.a "a is read normally")
(assertEq "skip-tree does not contain b" (builtins.attrNames tree-tl.skip-tree) [
"__readTree"
"__readTreeChildren"
"a"
])
(assertEq "skip-tree children list does not contain b" tree-tl.skip-tree.__readTreeChildren [ "a" ])
(assertEq "skip subtree default.nix is read"
tree-tl.skip-subtree.but
"the default.nix is still read")
(assertEq "skip subtree a/default.nix is skipped"
(tree-tl.skip-subtree ? a)
false)
(assertEq "skip subtree b/c.nix is skipped"
(tree-tl.skip-subtree ? b)
false)
(assertEq "skip subtree default.nix is read" tree-tl.skip-subtree.but
"the default.nix is still read"
)
(assertEq "skip subtree a/default.nix is skipped" (tree-tl.skip-subtree ? a) false)
(assertEq "skip subtree b/c.nix is skipped" (tree-tl.skip-subtree ? b) false)
(assertEq "skip subtree a/default.nix would be read without .skip-subtree"
(tree-tl.no-skip-subtree.a)
"am I subtree yet?")
(assertEq "skip subtree b/c.nix would be read without .skip-subtree"
(tree-tl.no-skip-subtree.b.c)
"cool")
"am I subtree yet?"
)
(assertEq "skip subtree b/c.nix would be read without .skip-subtree" (tree-tl.no-skip-subtree.b.c
) "cool")
(assertEq "default.nix attrset is merged with siblings"
tree-tl.default-nix.no
"siblings should be read")
(assertEq "default.nix means sibling isnt read"
(tree-tl.default-nix ? sibling)
false)
(assertEq "default.nix attrset is merged with siblings" tree-tl.default-nix.no
"siblings should be read"
)
(assertEq "default.nix means sibling isnt read" (tree-tl.default-nix ? sibling) false)
(assertEq "default.nix means subdirs are still read and merged into default.nix"
(tree-tl.default-nix.subdir.a)
"but Im picked up")
"but Im picked up"
)
(assertEq "default.nix can be not an attrset"
tree-tl.default-nix.no-merge
"Im not merged with any children")
(assertEq "default.nix is not an attrset -> children are not merged"
(tree-tl.default-nix.no-merge ? subdir)
false)
(assertEq "default.nix can be not an attrset" tree-tl.default-nix.no-merge
"Im not merged with any children"
)
(assertEq "default.nix is not an attrset -> children are not merged" (
tree-tl.default-nix.no-merge ? subdir
) false)
(assertEq "default.nix can contain a derivation"
(lib.isDerivation tree-tl.default-nix.can-be-drv)
true)
(assertEq "default.nix can contain a derivation" (lib.isDerivation tree-tl.default-nix.can-be-drv)
true
)
(assertEq "Even if default.nix is a derivation, children are traversed and merged"
tree-tl.default-nix.can-be-drv.subdir.a
"Picked up through the drv")
(assertEq "default.nix drv is not changed by readTree"
tree-tl.default-nix.can-be-drv
(import ./test-tree-traversal/default-nix/can-be-drv/default.nix { }))
"Picked up through the drv"
)
(assertEq "default.nix drv is not changed by readTree" tree-tl.default-nix.can-be-drv (
import ./test-tree-traversal/default-nix/can-be-drv/default.nix { }
))
(assertEq "`here` argument represents the attrset a given file is part of"
(builtins.removeAttrs tree-tl.here-arg [ "__readTree" "__readTreeChildren" "subdir" ])
(builtins.removeAttrs tree-tl.here-arg [
"__readTree"
"__readTreeChildren"
"subdir"
])
{
attr1 = "foo";
attr2 = "foo";
attr3 = "sibl1";
})
}
)
];
# these each call readTree themselves because the throws have to happen inside assertThrows
@ -149,7 +150,8 @@ let
(depot.nix.readTree {
path = ./test-wrong-not-a-function;
args = { };
}).not-a-function)
}).not-a-function
)
# cant test for that, assertThrows cant catch this error
# (assertThrows "this file is a function but doesnt have dots"
# (depot.nix.readTree {} ./test-wrong-no-dots).no-dots-in-function)
@ -160,22 +162,36 @@ let
args = { };
};
assertMarkerByPath = path:
assertMarkerByPath =
path:
assertEq "${lib.concatStringsSep "." path} is marked correctly"
(lib.getAttrFromPath path read-markers).__readTree
path;
markers = it "marks nodes correctly" [
(assertMarkerByPath [ "directory-marked" ])
(assertMarkerByPath [ "directory-marked" "nested" ])
(assertMarkerByPath [ "file-children" "one" ])
(assertMarkerByPath [ "file-children" "two" ])
(assertEq "nix file children are marked correctly"
read-markers.file-children.__readTreeChildren [ "one" "two" ])
(assertEq "directory children are marked correctly"
read-markers.directory-marked.__readTreeChildren [ "nested" ])
(assertEq "absence of children is marked"
read-markers.directory-marked.nested.__readTreeChildren [ ])
(assertMarkerByPath [
"directory-marked"
"nested"
])
(assertMarkerByPath [
"file-children"
"one"
])
(assertMarkerByPath [
"file-children"
"two"
])
(assertEq "nix file children are marked correctly" read-markers.file-children.__readTreeChildren [
"one"
"two"
])
(assertEq "directory children are marked correctly" read-markers.directory-marked.__readTreeChildren
[ "nested" ]
)
(assertEq "absence of children is marked" read-markers.directory-marked.nested.__readTreeChildren
[ ]
)
];
in

View file

@ -1,3 +1,4 @@
{ ... }:
{ }
{
}

View file

@ -1,3 +1,4 @@
{ ... }:
{ }
{
}

View file

@ -1,3 +1,4 @@
{ ... }:
{ }
{
}

View file

@ -1,3 +1,4 @@
{ ... }:
{ }
{
}

View file

@ -3,5 +3,8 @@ derivation {
name = "im-a-drv";
system = builtins.currentSystem;
builder = "/bin/sh";
args = [ "-c" ''echo "" > $out'' ];
args = [
"-c"
''echo "" > $out''
];
}

View file

@ -1,4 +1,5 @@
{ here, ... }: {
{ here, ... }:
{
attr1 = "foo";
attr2 = here.attr1;

View file

@ -1,2 +1 @@
{ here, ... }:
here.sibl1
{ here, ... }: here.sibl1

View file

@ -1,3 +1,3 @@
{}:
{ }:
"This is a function, but readTree wants to pass a bunch of arguments, and not having dots means we depend on exactly which arguments."

View file

@ -15,19 +15,19 @@
{
# root path to use as a reference point
root
, # list of paths below `root` that should be
root,
# list of paths below `root` that should be
# included in the resulting directory
#
# If path, need to refer to the actual file / directory to be included.
# If a string, it is treated as a string relative to the root.
paths
, # (optional) name to use for the derivation
paths,
# (optional) name to use for the derivation
#
# This should always be set when using roots that do not have
# controlled names, such as when passing the top-level of a git
# repository (e.g. `depot.path.origSrc`).
name ? builtins.baseNameOf root
name ? builtins.baseNameOf root,
}:
let
@ -36,12 +36,14 @@ let
# Count slashes in a path.
#
# Type: path -> int
depth = path: lib.pipe path [
toString
(builtins.split "/")
(builtins.filter builtins.isList)
builtins.length
];
depth =
path:
lib.pipe path [
toString
(builtins.split "/")
(builtins.filter builtins.isList)
builtins.length
];
# (Parent) directories will be created from deepest to shallowest
# which should mean no conflicts are caused unless both a child
@ -52,19 +54,24 @@ let
# Create a set which contains the source path to copy / symlink and
# it's destination, so the path below the destination root including
# a leading slash. Additionally some sanity checking is done.
makeSymlink = path:
makeSymlink =
path:
let
withLeading = p: if builtins.substring 0 1 p == "/" then p else "/" + p;
fullPath =
if builtins.isPath path then path
else if builtins.isString path then (root + withLeading path)
else builtins.throw "Unsupported path type ${builtins.typeOf path}";
if builtins.isPath path then
path
else if builtins.isString path then
(root + withLeading path)
else
builtins.throw "Unsupported path type ${builtins.typeOf path}";
strPath = toString fullPath;
contextPath = "${fullPath}";
belowRoot = builtins.substring rootLength (-1) strPath;
prefix = builtins.substring 0 rootLength strPath;
in
assert toString root == prefix; {
assert toString root == prefix;
{
src = contextPath;
dst = belowRoot;
};
@ -73,12 +80,13 @@ let
in
# TODO(sterni): teach readTree to also read symlinked directories,
# so we ln -sT instead of cp -aT.
# so we ln -sT instead of cp -aT.
pkgs.runCommand "sparse-${name}" { } (
lib.concatMapStrings
({ src, dst }: ''
lib.concatMapStrings (
{ src, dst }:
''
mkdir -p "$(dirname "$out${dst}")"
cp -aT --reflink=auto "${src}" "$out${dst}"
'')
symlinks
''
) symlinks
)

View file

@ -10,26 +10,23 @@ rec {
#
# Type: stateMonad s a -> (a -> stateMonad s b) -> stateMonad s b
bind = action: f: state:
bind =
action: f: state:
let
afterAction = action state;
in
(f afterAction.value) afterAction.state;
# Type: stateMonad s a -> stateMonad s b -> stateMonad s b
after = action1: action2: state: action2 (action1 state).state;
after =
action1: action2: state:
action2 (action1 state).state;
# Type: stateMonad s (stateMonad s a) -> stateMonad s a
join = action: bind action (action': action');
# Type: [a] -> (a -> stateMonad s b) -> stateMonad s null
for_ = xs: f:
builtins.foldl'
(laterAction: x:
after (f x) laterAction
)
(pure null)
xs;
for_ = xs: f: builtins.foldl' (laterAction: x: after (f x) laterAction) (pure null) xs;
#
# Applicative
@ -52,10 +49,16 @@ rec {
#
# Type: (s -> s) -> stateMonad s null
modify = f: state: { value = null; state = f state; };
modify = f: state: {
value = null;
state = f state;
};
# Type: stateMonad s s
get = state: { value = state; inherit state; };
get = state: {
value = state;
inherit state;
};
# Type: s -> stateMonad s null
set = new: modify (_: new);
@ -64,9 +67,15 @@ rec {
getAttr = attr: fmap (state: state.${attr}) get;
# Type: str -> (any -> any) -> stateMonad s null
modifyAttr = attr: f: modify (state: state // {
${attr} = f state.${attr};
});
modifyAttr =
attr: f:
modify (
state:
state
// {
${attr} = f state.${attr};
}
);
# Type: str -> any -> stateMonad s null
setAttr = attr: value: modifyAttr attr (_: value);

View file

@ -3,28 +3,33 @@ let
# Takes a tag, checks whether it is an attrset with one element,
# if so sets `isTag` to `true` and sets the name and value.
# If not, sets `isTag` to `false` and sets `errmsg`.
verifyTag = tag:
verifyTag =
tag:
let
cases = builtins.attrNames tag;
len = builtins.length cases;
in
if builtins.length cases == 1
then
let name = builtins.head cases; in {
if builtins.length cases == 1 then
let
name = builtins.head cases;
in
{
isTag = true;
name = name;
val = tag.${name};
errmsg = null;
}
else {
isTag = false;
errmsg =
("match: an instance of a sum is an attrset "
else
{
isTag = false;
errmsg = (
"match: an instance of a sum is an attrset "
+ "with exactly one element, yours had ${toString len}"
+ ", namely: ${lib.generators.toPretty {} cases}");
name = null;
val = null;
};
+ ", namely: ${lib.generators.toPretty { } cases}"
);
name = null;
val = null;
};
# Returns the tag name of a given tag attribute set.
# Throws if the tag is invalid.
@ -39,11 +44,15 @@ let
tagValue = tag: (assertIsTag tag).val;
# like `verifyTag`, but throws the error message if it is not a tag.
assertIsTag = tag:
let res = verifyTag tag; in
assertIsTag =
tag:
let
res = verifyTag tag;
in
assert res.isTag || throw res.errmsg;
{ inherit (res) name val; };
{
inherit (res) name val;
};
# Discriminator for values.
# Goes through a list of tagged predicates `{ <tag> = <pred>; }`
@ -64,22 +73,22 @@ let
# { negative = i: i < 0; }
# ] 1
# => { smol = 1; }
discrDef = defTag: fs: v:
discrDef =
defTag: fs: v:
let
res = lib.findFirst
(t: t.val v)
null
(map assertIsTag fs);
res = lib.findFirst (t: t.val v) null (map assertIsTag fs);
in
if res == null
then { ${defTag} = v; }
else { ${res.name} = v; };
if res == null then { ${defTag} = v; } else { ${res.name} = v; };
# Like `discrDef`, but fail if there is no match.
discr = fs: v:
let res = discrDef null fs v; in
assert lib.assertMsg (res != { })
"tag.discr: No predicate found that matches ${lib.generators.toPretty {} v}";
discr =
fs: v:
let
res = discrDef null fs v;
in
assert lib.assertMsg (
res != { }
) "tag.discr: No predicate found that matches ${lib.generators.toPretty { } v}";
res;
# The canonical pattern matching primitive.
@ -104,20 +113,27 @@ let
# match success matcher == 43
# && match failure matcher == 0;
#
match = sum: matcher:
let cases = builtins.attrNames sum;
in assert
let len = builtins.length cases; in
lib.assertMsg (len == 1)
("match: an instance of a sum is an attrset "
match =
sum: matcher:
let
cases = builtins.attrNames sum;
in
assert
let
len = builtins.length cases;
in
lib.assertMsg (len == 1) (
"match: an instance of a sum is an attrset "
+ "with exactly one element, yours had ${toString len}"
+ ", namely: ${lib.generators.toPretty {} cases}");
let case = builtins.head cases;
in assert
lib.assertMsg (matcher ? ${case})
("match: \"${case}\" is not a valid case of this sum, "
+ "the matcher accepts: ${lib.generators.toPretty {}
(builtins.attrNames matcher)}");
+ ", namely: ${lib.generators.toPretty { } cases}"
);
let
case = builtins.head cases;
in
assert lib.assertMsg (matcher ? ${case}) (
"match: \"${case}\" is not a valid case of this sum, "
+ "the matcher accepts: ${lib.generators.toPretty { } (builtins.attrNames matcher)}"
);
matcher.${case} sum.${case};
# A `match` with the arguments flipped.

View file

@ -1,100 +1,99 @@
{ depot, lib, ... }:
let
/* Get the basename of a store path without
the leading hash.
/*
Get the basename of a store path without
the leading hash.
Type: (path | drv | string) -> string
Type: (path | drv | string) -> string
Example:
storePathName ./foo.c
=> "foo.c"
Example:
storePathName ./foo.c
=> "foo.c"
storePathName (writeText "foo.c" "int main() { return 0; }")
=> "foo.c"
storePathName (writeText "foo.c" "int main() { return 0; }")
=> "foo.c"
storePathName "${hello}/bin/hello"
=> "hello"
storePathName "${hello}/bin/hello"
=> "hello"
*/
storePathName = p:
if lib.isDerivation p
then p.name
else if builtins.isPath p
then builtins.baseNameOf p
else if builtins.isString p || (builtins.isAttrs p && (p ? outPath || p ? __toString))
then
storePathName =
p:
if lib.isDerivation p then
p.name
else if builtins.isPath p then
builtins.baseNameOf p
else if builtins.isString p || (builtins.isAttrs p && (p ? outPath || p ? __toString)) then
let
strPath = toString p;
# strip leading storeDir and trailing slashes
noStoreDir = lib.removeSuffix "/"
(lib.removePrefix "${builtins.storeDir}/" strPath);
noStoreDir = lib.removeSuffix "/" (lib.removePrefix "${builtins.storeDir}/" strPath);
# a basename of a child of a store path isn't really
# referring to a store path, so removing the string
# context is safe (e. g. "hello" for "${hello}/bin/hello").
basename = builtins.unsafeDiscardStringContext
(builtins.baseNameOf strPath);
basename = builtins.unsafeDiscardStringContext (builtins.baseNameOf strPath);
in
# If p is a direct child of storeDir, we need to remove
# the leading hash as well to make sure that:
# `storePathName drv == storePathName (toString drv)`.
if noStoreDir == basename
then builtins.substring 33 (-1) basename
else basename
else builtins.throw "Don't know how to get (base)name of "
+ lib.generators.toPretty { } p;
# the leading hash as well to make sure that:
# `storePathName drv == storePathName (toString drv)`.
if noStoreDir == basename then builtins.substring 33 (-1) basename else basename
else
builtins.throw "Don't know how to get (base)name of " + lib.generators.toPretty { } p;
/* Query the type of a path exposing the same information as would be by
`builtins.readDir`, but for a single, specific target path.
/*
Query the type of a path exposing the same information as would be by
`builtins.readDir`, but for a single, specific target path.
The information is returned as a tagged value, i. e. an attribute set with
exactly one attribute where the type of the path is encoded in the name
of the single attribute. The allowed tags and values are as follows:
The information is returned as a tagged value, i. e. an attribute set with
exactly one attribute where the type of the path is encoded in the name
of the single attribute. The allowed tags and values are as follows:
* `regular`: is a regular file, always `true` if returned
* `directory`: is a directory, always `true` if returned
* `missing`: path does not exist, always `true` if returned
* `symlink`: path is a symlink, always `true` if returned
* `regular`: is a regular file, always `true` if returned
* `directory`: is a directory, always `true` if returned
* `missing`: path does not exist, always `true` if returned
* `symlink`: path is a symlink, always `true` if returned
Type: path(-like) -> tag
Type: path(-like) -> tag
`tag` refers to the attribute set format of `//nix/tag`.
`tag` refers to the attribute set format of `//nix/tag`.
Example:
pathType ./foo.c
=> { regular = true; }
Example:
pathType ./foo.c
=> { regular = true; }
pathType /home/lukas
=> { directory = true; }
pathType /home/lukas
=> { directory = true; }
pathType ./result
=> { symlink = true; }
pathType ./result
=> { symlink = true; }
pathType ./link-to-file
=> { symlink = true; }
pathType ./link-to-file
=> { symlink = true; }
pathType /does/not/exist
=> { missing = true; }
pathType /does/not/exist
=> { missing = true; }
# Check if a path exists
!(pathType /file ? missing)
# Check if a path exists
!(pathType /file ? missing)
# Check if a path is a directory or a symlink to a directory
# A handy shorthand for this is provided as `realPathIsDirectory`.
pathType /path ? directory || (pathType /path).symlink or null == "directory"
# Check if a path is a directory or a symlink to a directory
# A handy shorthand for this is provided as `realPathIsDirectory`.
pathType /path ? directory || (pathType /path).symlink or null == "directory"
# Match on the result using //nix/tag
nix.tag.match (nix.utils.pathType ./result) {
symlink = _: "symlink";
directory = _: "directory";
regular = _: "regular";
missing = _: "path does not exist";
}
=> "symlink"
# Match on the result using //nix/tag
nix.tag.match (nix.utils.pathType ./result) {
symlink = _: "symlink";
directory = _: "directory";
regular = _: "regular";
missing = _: "path does not exist";
}
=> "symlink"
# Query path type
nix.tag.tagName (pathType /path)
# Query path type
nix.tag.tagName (pathType /path)
*/
pathType = path:
pathType =
path:
let
# baseNameOf is very annoyed if we proceed with string context.
# We need to call toString to prevent unsafeDiscardStringContext
@ -119,52 +118,56 @@ let
${thisPathType} = true;
};
pathType' = path:
pathType' =
path:
let
p = pathType path;
in
if p ? missing
then builtins.throw "${lib.generators.toPretty {} path} does not exist"
else p;
if p ? missing then builtins.throw "${lib.generators.toPretty { } path} does not exist" else p;
/* Check whether the given path is a directory.
Throws if the path in question doesn't exist.
/*
Check whether the given path is a directory.
Throws if the path in question doesn't exist.
Type: path(-like) -> bool
Type: path(-like) -> bool
*/
isDirectory = path: pathType' path ? directory;
/* Check whether the given path is a regular file.
Throws if the path in question doesn't exist.
/*
Check whether the given path is a regular file.
Throws if the path in question doesn't exist.
Type: path(-like) -> bool
Type: path(-like) -> bool
*/
isRegularFile = path: pathType' path ? regular;
/* Check whether the given path is a symbolic link.
Throws if the path in question doesn't exist.
/*
Check whether the given path is a symbolic link.
Throws if the path in question doesn't exist.
Type: path(-like) -> bool
Type: path(-like) -> bool
*/
isSymlink = path: pathType' path ? symlink;
/* Checks whether the given value is (or contains) a reference to a
path that will be retained in the store path resulting from a derivation.
So if isReferencablePath returns true, the given value may be used in a
way that allows accessing it at runtime of any Nix built program.
/*
Checks whether the given value is (or contains) a reference to a
path that will be retained in the store path resulting from a derivation.
So if isReferencablePath returns true, the given value may be used in a
way that allows accessing it at runtime of any Nix built program.
Returns true for:
Returns true for:
- Strings with context (if the string is/contains a single path is not verified!)
- Path values
- Derivations
- Strings with context (if the string is/contains a single path is not verified!)
- Path values
- Derivations
Note that the value still needs to used in a way that forces string context
(and thus reference tracking) to be created, e.g. in string interpolation.
Note that the value still needs to used in a way that forces string context
(and thus reference tracking) to be created, e.g. in string interpolation.
Type: any -> bool
Type: any -> bool
*/
isReferencablePath = value:
isReferencablePath =
value:
builtins.isPath value
|| lib.isDerivation value
|| (builtins.isString value && builtins.hasContext value);

View file

@ -1,35 +1,55 @@
{ depot, lib, pkgs, ... }:
{
depot,
lib,
pkgs,
...
}:
let
inherit (lib) fix pipe mapAttrsToList isAttrs concatLines isString isDerivation isPath;
inherit (lib)
fix
pipe
mapAttrsToList
isAttrs
concatLines
isString
isDerivation
isPath
;
inherit (depot.nix.utils) isReferencablePath;
esc = s: lib.escapeShellArg /* ensure paths import into store */ "${s}";
esc =
s:
lib.escapeShellArg # ensure paths import into store
"${s}";
writeTreeAtPath = path: tree:
writeTreeAtPath =
path: tree:
''
mkdir -p "$out/"${esc path}
''
+ pipe tree [
(mapAttrsToList (k: v:
(mapAttrsToList (
k: v:
if isReferencablePath v then
"cp -R --reflink=auto ${esc "${v}"} \"$out/\"${esc path}/${esc k}"
else if lib.isAttrs v then
writeTreeAtPath (path + "/" + k) v
else
throw "invalid type (expected path, derivation, string with context, or attrs)"))
throw "invalid type (expected path, derivation, string with context, or attrs)"
))
concatLines
];
/* Create a directory tree specified by a Nix attribute set structure.
/*
Create a directory tree specified by a Nix attribute set structure.
Each value in `tree` should either be a file, a directory, or another tree
attribute set. Those paths will be written to a directory tree
corresponding to the structure of the attribute set.
Each value in `tree` should either be a file, a directory, or another tree
attribute set. Those paths will be written to a directory tree
corresponding to the structure of the attribute set.
Type: string -> attrSet -> derivation
Type: string -> attrSet -> derivation
*/
writeTree = name: tree:
pkgs.runCommandLocal name { } (writeTreeAtPath "" tree);
writeTree = name: tree: pkgs.runCommandLocal name { } (writeTreeAtPath "" tree);
in
# __functor trick so readTree can add the tests attribute

View file

@ -1,93 +1,102 @@
{ depot, pkgs, lib, ... }:
{
depot,
pkgs,
lib,
...
}:
let
inherit (pkgs) runCommand writeText writeTextFile;
inherit (depot.nix) writeTree;
checkTree = name: tree: expected:
checkTree =
name: tree: expected:
runCommand "writeTree-test-${name}"
{
nativeBuildInputs = [ pkgs.buildPackages.lr ];
passAsFile = [ "expected" ];
inherit expected;
} ''
actualPath="$NIX_BUILD_TOP/actual"
cd ${lib.escapeShellArg (writeTree name tree)}
lr . > "$actualPath"
diff -u "$expectedPath" "$actualPath" | tee "$out"
'';
}
''
actualPath="$NIX_BUILD_TOP/actual"
cd ${lib.escapeShellArg (writeTree name tree)}
lr . > "$actualPath"
diff -u "$expectedPath" "$actualPath" | tee "$out"
'';
in
depot.nix.readTree.drvTargets {
empty = checkTree "empty" { }
''
.
'';
empty = checkTree "empty" { } ''
.
'';
simple-paths = checkTree "simple"
{
writeTree = {
meta = {
"owners.txt" = ../OWNERS;
simple-paths =
checkTree "simple"
{
writeTree = {
meta = {
"owners.txt" = ../OWNERS;
};
"code.nix" = ../default.nix;
all-tests = ./.;
nested.dirs.eval-time = builtins.toFile "owothia" ''
hold me owo
'';
};
"code.nix" = ../default.nix;
all-tests = ./.;
nested.dirs.eval-time = builtins.toFile "owothia" ''
hold me owo
'';
};
}
''
.
./writeTree
./writeTree/all-tests
./writeTree/all-tests/default.nix
./writeTree/code.nix
./writeTree/meta
./writeTree/meta/owners.txt
./writeTree/nested
./writeTree/nested/dirs
./writeTree/nested/dirs/eval-time
'';
empty-dirs = checkTree "empty-dirs"
{
this.dir.is.empty = { };
so.is.this.one = { };
}
''
.
./so
./so/is
./so/is/this
./so/is/this/one
./this
./this/dir
./this/dir/is
./this/dir/is/empty
'';
drvs = checkTree "drvs"
{
file-drv = writeText "road.txt" ''
Any road followed precisely to its end leads precisely nowhere.
}
''
.
./writeTree
./writeTree/all-tests
./writeTree/all-tests/default.nix
./writeTree/code.nix
./writeTree/meta
./writeTree/meta/owners.txt
./writeTree/nested
./writeTree/nested/dirs
./writeTree/nested/dirs/eval-time
'';
dir-drv = writeTextFile {
name = "dir-of-text";
destination = "/text/in/more/dirs.txt";
text = ''
Climb the mountain just a little bit to test that its a mountain.
From the top of the mountain, you cannot see the mountain.
empty-dirs =
checkTree "empty-dirs"
{
this.dir.is.empty = { };
so.is.this.one = { };
}
''
.
./so
./so/is
./so/is/this
./so/is/this/one
./this
./this/dir
./this/dir/is
./this/dir/is/empty
'';
drvs =
checkTree "drvs"
{
file-drv = writeText "road.txt" ''
Any road followed precisely to its end leads precisely nowhere.
'';
};
}
''
.
./dir-drv
./dir-drv/text
./dir-drv/text/in
./dir-drv/text/in/more
./dir-drv/text/in/more/dirs.txt
./file-drv
'';
dir-drv = writeTextFile {
name = "dir-of-text";
destination = "/text/in/more/dirs.txt";
text = ''
Climb the mountain just a little bit to test that its a mountain.
From the top of the mountain, you cannot see the mountain.
'';
};
}
''
.
./dir-drv
./dir-drv/text
./dir-drv/text/in
./dir-drv/text/in/more
./dir-drv/text/in/more/dirs.txt
./file-drv
'';
}