The number of jobs in the depot pipeline is approaching the limit of
what the Buildkite backend can accept in a single pipeline upload.
Based on a conversation with their support, my understanding is that
this limit is caused by internal locking mechanisms at Buildkite.

To work around this, we can instead split the pipeline into several
smaller chunks that are uploaded serially (a sketch of such an upload
loop follows below). This commit introduces the logic to chunk the
pipeline accordingly.

The chunk size chosen is 256 for now, a multiple of our number of
agents, which is useful if builds from the first chunk can start
before the subsequent chunks are uploaded. Note that this chunk size
is significantly below even the current number of targets (~460 as of
this commit), but choosing a lower chunk size may alleviate the
timeouts we have been seeing during pipeline uploads.

Change-Id: I77030aaf8b874c330218b78c77d15216e13b9af7
Reviewed-on: https://cl.tvl.fyi/c/depot/+/4332
Tested-by: BuildkiteCI
Reviewed-by: wpcarro <wpcarro@gmail.com>
Autosubmit: tazjin <mail@tazj.in>
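A minimal sketch of such a serial upload loop, assuming the chunk
files produced by the derivation below and the standard
buildkite-agent CLI (the actual upload logic lives outside of this
file; the loop is illustrative only):

  for chunk in chunk-*.json; do
    # "pipeline upload" appends the steps in the file to the running build.
    buildkite-agent pipeline upload "$chunk"
  done
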
# This file configures the primary build pipeline used for the
# top-level list of depot targets.
#
# It outputs a "YAML" (actually JSON) file which is evaluated and
# submitted to Buildkite at the start of each build. This means we can
# dynamically configure the pipeline execution here.
{ depot, lib, pkgs, ... }:

let
  inherit (builtins)
    attrValues
    concatStringsSep
    foldl'
    length
    map
    mapAttrs
    toJSON;

  inherit (pkgs) runCommandNoCC symlinkJoin writeText;

  # Create an expression that builds the target at the specified
  # location.
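  #
  # For example, a hypothetical target read from //foo/bar with
  # subtarget "baz" (names illustrative only) would produce:
  #
  #   builtins.getAttr "baz" (builtins.getAttr "bar" (builtins.getAttr "foo" (import ./. {})))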
  mkBuildExpr = target:
    let
      descend = expr: attr: "builtins.getAttr \"${attr}\" (${expr})";
      targetExpr = foldl' descend "import ./. {}" target.__readTree;
      subtargetExpr = descend targetExpr target.__subtarget;
    in if target ? __subtarget then subtargetExpr else targetExpr;

  # Create a pipeline label from the target's tree location.
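  #
  # For example, a hypothetical target at readTree location
  # [ "ops" "nixos" ] with subtarget "whitby" would be labelled
  # "ops/nixos:whitby".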
  mkLabel = target:
    let label = concatStringsSep "/" target.__readTree;
    in if target ? __subtarget
       then "${label}:${target.__subtarget}"
       else label;

  # Create a pipeline step from a single target.
  mkStep = target: {
    command = let
      drvPath = builtins.unsafeDiscardStringContext target.drvPath;
    in lib.concatStringsSep " " [
      # First try to realise the drvPath of the target so we don't evaluate twice.
      # Nix has no concept of depending on a derivation file without depending on
      # at least one of its `outPath`s, so we need to discard the string context
      # if we don't want to build everything during pipeline construction.
      "nix-store --realise '${drvPath}'"

      # Since we don't gcroot the derivation files, they may be deleted by the
      # garbage collector. In that case we can reevaluate and build the attribute
      # using nix-build.
      "|| (test ! -f '${drvPath}' && nix-build -E '${mkBuildExpr target}' --show-trace)"
    ];
    label = ":nix: ${mkLabel target}";

    # Skip build steps if their out path has already been built.
    skip = let
      shouldSkip = with builtins;
        # Only skip in real Buildkite builds
        (getEnv "BUILDKITE_BUILD_ID" != "") &&
        # Always build everything for the canon branch.
        (getEnv "BUILDKITE_BRANCH" != "refs/heads/canon") &&
        # Discard string context to avoid realising the store path during
        # pipeline construction.
        (pathExists (unsafeDiscardStringContext target.outPath));
    in if shouldSkip then "Target was already built." else false;
  };

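  # A rendered step for a hypothetical target might look like this
  # (store path and label illustrative only):
  #
  #   {
  #     command = "nix-store --realise '/nix/store/...-foo.drv' || (test ! -f ... && nix-build ...)";
  #     label = ":nix: foo";
  #     skip = false;
  #   }
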
  # Protobuf check step which validates that changes to .proto files
  # between revisions don't cause backwards-incompatible or otherwise
  # flawed changes.
  protoCheck = {
    command = "${depot.nix.bufCheck}/bin/ci-buf-check";
    label = ":water_buffalo:";
  };

  # All pipeline steps before batching them into smaller chunks.
  allSteps =
    # Create build steps for each CI target
    (map mkStep depot.ci.targets)

    ++ [
      # Simultaneously run protobuf checks
      protoCheck
    ];

  # Helper function to inelegantly divide a list into chunks of at
  # most n elements.
  #
  # This works by assigning each element a chunk ID based on its
  # index, and then grouping all elements by their chunk ID.
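  #
  # For example (note that the 1-based indexing leaves the first
  # chunk one element short):
  #
  #   chunksOf 2 [ "a" "b" "c" "d" ]
  #   => { "1" = [ "a" ]; "2" = [ "b" "c" ]; "3" = [ "d" ]; }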
  chunksOf = n: list: let
    chunkId = idx: toString (idx / n + 1);
    assigned = lib.imap1 (idx: value: { inherit value; chunk = chunkId idx; }) list;
    unchunk = mapAttrs (_: elements: map (e: e.value) elements);
  in unchunk (lib.groupBy (e: e.chunk) assigned);

  # Define a build pipeline chunk as a JSON file, using the pipeline
  # format documented on
  # https://buildkite.com/docs/pipelines/defining-steps.
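  #
  # Each chunk file is named after its chunk ID; chunk "2", for
  # instance, becomes a file "chunk-2.json" containing
  # { "steps": [ ... ] }.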
  makePipelineChunk = chunkId: chunk: rec {
    filename = "chunk-${chunkId}.json";
    path = writeText filename (toJSON {
      steps = chunk;
    });
  };

  pipelineChunks = attrValues (mapAttrs makePipelineChunk (chunksOf 256 allSteps));

in runCommandNoCC "depot-pipeline" {} ''
  mkdir $out
  echo "Generated ${toString (length pipelineChunks)} pipeline chunks"
  ${
    lib.concatMapStringsSep "\n"
      (chunk: "cp ${chunk.path} $out/${chunk.filename}") pipelineChunks
  }
''