feat(web/*): drop most things

Change-Id: I85dde8470a4cf737bc193e0b50d0a4b5ee6d7f56
Florian Klink 2025-02-24 17:31:36 +07:00
parent cff6575948
commit 768f053416
26 changed files with 0 additions and 1940 deletions


@ -1,153 +0,0 @@
# This file defines functions for generating an Atom feed.
{ depot, lib, pkgs, ... }:
with depot.nix.yants;
let
inherit (builtins) foldl' map readFile replaceStrings sort;
inherit (lib) concatStrings concatStringsSep max removeSuffix;
inherit (pkgs) runCommand;
# 'link' describes a related link to a feed, or feed element.
#
# https://validator.w3.org/feed/docs/atom.html#link
link = struct "link" {
rel = string;
href = string;
};
# 'entry' describes a feed entry, for example a single post on a
# blog. Some optional fields have been omitted.
#
# https://validator.w3.org/feed/docs/atom.html#requiredEntryElements
entry = struct "entry" {
# Identifies the entry using a universally unique and permanent URI.
id = string;
# Contains a human readable title for the entry. This value should
# not be blank.
title = string;
# Content of the entry.
content = option string;
# Indicates the last time the entry was modified in a significant
# way (in seconds since epoch).
updated = int;
# Names authors of the entry. Recommended element.
authors = option (list string);
# Related web pages, such as the web location of a blog post.
links = option (list link);
# Conveys a short summary, abstract, or excerpt of the entry.
summary = option string;
# Contains the time of the initial creation or first availability
# of the entry.
published = option int;
# Conveys information about rights, e.g. copyrights, held in and
# over the entry.
rights = option string;
};
# 'feed' describes the metadata of the Atom feed itself.
#
# Some optional fields have been omitted.
#
# https://validator.w3.org/feed/docs/atom.html#requiredFeedElements
feed = struct "feed" {
# Identifies the feed using a universally unique and permanent URI.
id = string;
# Contains a human readable title for the feed.
title = string;
# Indicates the last time the feed was modified in a significant
# way (in seconds since epoch). Will be calculated based on most
# recently updated entry if unset.
updated = option int;
# Entries contained within the feed.
entries = list entry;
# Names authors of the feed. Recommended element.
authors = option (list string);
# Related web locations. Recommended element.
links = option (list link);
# Conveys information about rights, e.g. copyrights, held in and
# over the feed.
rights = option string;
# Contains a human-readable description or subtitle for the feed.
subtitle = option string;
};
# Feed generation functions:
renderEpoch = epoch: removeSuffix "\n" (readFile (runCommand "date-${toString epoch}" { } ''
date --date='@${toString epoch}' --utc --iso-8601='seconds' > $out
''));
escape = replaceStrings [ "<" ">" "&" "'" ] [ "&lt;" "&gt;" "&amp;" "&#39;" ];
elem = name: content: ''<${name}>${escape content}</${name}>'';
renderLink = defun [ link string ] (l: ''
<link href="${escape l.href}" rel="${escape l.rel}" />
'');
# Technically the author element can also contain 'uri' and 'email'
# fields, but they are not used for the purpose of this feed and are
# omitted.
renderAuthor = author: ''<author><name>${escape author}</name></author>'';
renderEntry = defun [ entry string ] (e: ''
<entry>
${elem "title" e.title}
${elem "id" e.id}
${elem "updated" (renderEpoch e.updated)}
${if e ? published
then elem "published" (renderEpoch e.published)
else ""
}
${if e ? content
then ''<content type="html">${escape e.content}</content>''
else ""
}
${if e ? summary then elem "summary" e.summary else ""}
${concatStrings (map renderAuthor (e.authors or []))}
${if e ? subtitle then elem "subtitle" e.subtitle else ""}
${if e ? rights then elem "rights" e.rights else ""}
${concatStrings (map renderLink (e.links or []))}
</entry>
'');
mostRecentlyUpdated = defun [ (list entry) int ] (entries:
foldl' max 0 (map (e: e.updated) entries)
);
sortEntries = sort (a: b: a.published > b.published);
renderFeed = defun [ feed string ] (f: ''
<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
${elem "id" f.id}
${elem "title" f.title}
${elem "updated" (renderEpoch (f.updated or (mostRecentlyUpdated f.entries)))}
${concatStringsSep "\n" (map renderAuthor (f.authors or []))}
${if f ? subtitle then elem "subtitle" f.subtitle else ""}
${if f ? rights then elem "rights" f.rights else ""}
${concatStrings (map renderLink (f.links or []))}
${concatStrings (map renderEntry (sortEntries f.entries))}
</feed>
'');
in
{
inherit entry feed renderFeed renderEpoch;
}


@ -1,70 +0,0 @@
# This creates the static files that make up my blog from the Markdown
# files in this repository.
#
# All blog posts are rendered from Markdown by cheddar.
{ depot, lib, pkgs, ... }@args:
with depot.nix.yants;
let
inherit (builtins) readFile;
inherit (depot.nix) renderMarkdown;
inherit (depot.web) atom-feed;
inherit (lib) singleton;
# Type definition for a single blog post.
post = struct "blog-post" {
key = string;
title = string;
date = int;
# Optional time at which this post was last updated.
updated = option int;
# Path to the Markdown file containing the post content.
content = path;
# Whether dangerous HTML tags should be filtered in this post. Can
# be disabled to, for example, embed videos in a post.
tagfilter = option bool;
# Optional name of the author to display.
author = option string;
# Should this post be included in the index? (defaults to true)
listed = option bool;
# Is this a draft? (adds a banner indicating that the link should
# not be shared)
draft = option bool;
# Previously each post title had a numeric ID. For these numeric
# IDs, redirects are generated so that old URLs stay compatible.
oldKey = option string;
};
# Rendering fragments for the HTML version of the blog.
fragments = import ./fragments.nix args;
# Functions for generating feeds for these blogs using //web/atom-feed.
toFeedEntry = { baseUrl, ... }: defun [ post atom-feed.entry ] (post: rec {
id = "${baseUrl}/${post.key}";
title = post.title;
content = readFile (renderMarkdown post.content);
published = post.date;
updated = post.updated or post.date;
links = singleton {
rel = "alternate";
href = id;
};
});
in
{
inherit post toFeedEntry;
inherit (fragments) renderPost;
# Helper function to determine whether a post should be included in
# listings (on homepages, feeds, ...)
includePost = post: !(fragments.isDraft post) && !(fragments.isUnlisted post);
}


@ -1,95 +0,0 @@
# This file defines various fragments of the blog, such as the header
# and footer, as functions that receive arguments to be templated into
# them.
#
# An entire post is rendered by `renderPost`, which assembles the
# fragments together in a runCommand execution.
{ depot, lib, pkgs, ... }:
let
inherit (builtins) filter map hasAttr replaceStrings;
inherit (pkgs) runCommand writeText;
inherit (depot.nix) renderMarkdown;
# Generate a post list for all listed, non-draft posts.
isDraft = post: (hasAttr "draft" post) && post.draft;
isUnlisted = post: (hasAttr "listed" post) && !post.listed;
escape = replaceStrings [ "<" ">" "&" "'" ] [ "&lt;" "&gt;" "&amp;" "&#39;" ];
header = name: title: staticUrl: ''
<!DOCTYPE html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="description" content="${escape name}">
<link rel="stylesheet" type="text/css" href="${staticUrl}/tvl.css" media="all">
<link rel="icon" type="image/webp" href="/static/favicon.webp">
<link rel="alternate" type="application/atom+xml" title="Atom Feed" href="https://tvl.fyi/feed.atom">
<title>${escape name}: ${escape title}</title>
</head>
<body class="light">
<header>
<h1><a class="blog-title" href="/">${escape name}</a> </h1>
<hr>
</header>
'';
fullFooter = content: ''
<hr>
<footer>
${content}
</footer>
</body>
'';
draftWarning = writeText "draft.html" ''
<p class="cheddar-callout cheddar-warning">
<b>Note:</b> This post is a <b>draft</b>! Please do not share
the link to it without asking first.
</p>
<hr>
'';
unlistedWarning = writeText "unlisted.html" ''
<p class="cheddar-callout cheddar-warning">
<b>Note:</b> This post is <b>unlisted</b>! Please do not share
the link to it without asking first.
</p>
<hr>
'';
renderPost = { name, footer, staticUrl ? "https://static.tvl.fyi/${depot.web.static.drvHash}", ... }: post: runCommand "${post.key}.html" { } ''
cat ${writeText "header.html" (header name post.title staticUrl)} > $out
# Write the post title & date
echo '<article><h2 class="inline">${escape post.title}</h2>' >> $out
echo '<aside class="date">' >> $out
date --date="@${toString post.date}" '+%Y-%m-%d' >> $out
${
if post ? updated
then ''date --date="@${toString post.updated}" '+ (updated %Y-%m-%d)' >> $out''
else ""
}
${if post ? author then "echo ' by ${post.author}' >> $out" else ""}
echo '</aside>' >> $out
${
# Add a warning to draft/unlisted posts to make it clear that
# people should not share the post.
if (isDraft post) then "cat ${draftWarning} >> $out"
else if (isUnlisted post) then "cat ${unlistedWarning} >> $out"
else "# Your ads could be here?"
}
# Write the actual post through cheddar's about-filter mechanism
cat ${renderMarkdown { path = post.content; tagfilter = post.tagfilter or true; }} >> $out
echo '</article>' >> $out
cat ${writeText "footer.html" (fullFooter footer)} >> $out
'';
in
{
inherit isDraft isUnlisted renderPost;
}


@ -1,29 +0,0 @@
# Expose all static assets as a folder. The derivation contains a
# `drvHash` attribute which can be used for cache-busting.
{ depot, lib, pkgs, ... }:

let
  storeDirLength = with builtins; (stringLength storeDir) + 1;
  logo = depot.web.tvl.logo;
in
lib.fix (self: pkgs.runCommand "tvl-static"
  {
    passthru = {
      # Preserving the string context here makes little sense: While we are
      # referencing this derivation, we are not doing so via the nix store,
      # so it makes little sense for Nix to police the references.
      drvHash = builtins.unsafeDiscardStringContext (
        lib.substring storeDirLength 32 self.drvPath
      );
    };
  } ''
  mkdir $out
  cp -r ${./.}/* $out
  cp ${logo.pastelRainbow} $out/logo-animated.svg
  cp ${logo.bluePng} $out/logo-blue.png
  cp ${logo.greenPng} $out/logo-green.png
  cp ${logo.orangePng} $out/logo-orange.png
  cp ${logo.purplePng} $out/logo-purple.png
  cp ${logo.redPng} $out/logo-red.png
  cp ${logo.yellowPng} $out/logo-yellow.png
'')

Five binary files deleted (contents not shown; three of them had listed sizes of 18 KiB, 105 KiB and 72 KiB).

File diff suppressed because one or more lines are too long


@ -1,136 +0,0 @@
/* Jetbrains Mono font from https://www.jetbrains.com/lp/mono/
licensed under Apache 2.0. Thanks, Jetbrains! */
@font-face {
font-family: jetbrains-mono;
src: url(jetbrains-mono.woff2);
}
@font-face {
font-family: jetbrains-mono;
font-weight: bold;
src: url(jetbrains-mono-bold.woff2);
}
@font-face {
font-family: jetbrains-mono;
font-style: italic;
src: url(jetbrains-mono-italic.woff2);
}
@font-face {
font-family: jetbrains-mono;
font-weight: bold;
font-style: italic;
src: url(jetbrains-mono-bold-italic.woff2);
}
/* Generic-purpose styling */
body {
max-width: 800px;
margin: 40px auto;
line-height: 1.6;
font-size: 18px;
padding: 0 10px;
font-family: jetbrains-mono, monospace;
}
h1, h2, h3 {
line-height: 1.2
}
/* Blog Posts */
article {
line-height: 1.5em;
}
/* spacing between the paragraphs in blog posts */
article p {
margin: 1.4em auto;
}
/* Blog styling */
.light {
color: #383838;
}
.blog-title {
color: inherit;
text-decoration: none;
}
.footer {
text-align: right;
}
.date {
text-align: right;
font-style: italic;
float: right;
}
.inline {
display: inline;
}
.lod {
text-align: center;
}
.uncoloured-link {
color: inherit;
}
pre {
width: 100%;
overflow: auto;
}
code {
background: aliceblue;
}
img {
max-width: 100%;
}
.cheddar-callout {
display: block;
padding: 10px;
}
.cheddar-question {
color: #3367d6;
background-color: #e8f0fe;
}
.cheddar-todo {
color: #616161;
background-color: #eeeeee;
}
.cheddar-tip {
color: #00796b;
background-color: #e0f2f1;
}
.cheddar-warning {
color: #a52714;
background-color: #fbe9e7;
}
kbd {
background-color: #eee;
border-radius: 3px;
border: 1px solid #b4b4b4;
box-shadow: 0 1px 1px rgba(0, 0, 0, .2), 0 2px 0 0 rgba(255, 255, 255, .7) inset;
color: #333;
display: inline-block;
font-size: .85em;
font-weight: 700;
line-height: 1;
padding: 2px 4px;
white-space: nowrap;
}


@ -1,3 +0,0 @@
set noparent
tazjin


@ -1,333 +0,0 @@
We've now been working on our rewrite of Nix, [Tvix][], for a little more than
two years.
Our last written update was in September 2023, and although we did publish a
couple of things in the meantime (flokli's talk on Tvix at [NixCon
2023][nixcon2023], our interview at the [Nix Developer
Dialogues][nix-dev-dialogues-tvix], or tazjin's [talk on
tvix-eval][tvix-eval-ru] (in Russian)), we never found the time to write
something down.
In the meantime a lot of stuff has happened though, so it's time to change that
:-)
Note: This blog post is intended for a technical audience that is already
intimately familiar with Nix, and knows what things like derivations or store
paths are. If you're new to Nix, this will not make a lot of sense to you!
## Evaluation regression testing
Most of the evaluator work has been driven by evaluating `nixpkgs`, and ensuring
that we produce the same derivations, and that their build results end up in the
same store paths.
Builds are not hooked up all the way to the evaluator yet, but for Nix code
without IFD (such as `nixpkgs`!) we can verify this property without building.
An evaluated Nix derivation's `outPath` (and `drvPath`) can be compared with
what C++ Nix produces for the same code, to determine whether we evaluated the
package (and all of its dependencies!) correctly [^1].
We added integration tests in CI that ensure the paths we calculate match those of
C++ Nix, and we are successfully evaluating fairly complicated expressions in them.
For example, we test against the Firefox derivation, which exercises some of the
more hairy bits in `nixpkgs` (like WASM cross-compilation infrastructure). Yay!
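
As a rough sketch of what such a spot check boils down to (not the actual CI
harness, and any attribute would do - Firefox is just a conveniently large
closure): evaluate the same expression with both implementations and diff the
resulting paths.

```nix
# Evaluate this with both C++ Nix and the Tvix evaluator, then compare
# the two results: if the paths match, the package and its whole
# dependency closure were evaluated correctly.
let
  pkgs = import <nixpkgs> { };
in
{
  drvPath = pkgs.firefox.drvPath;
  outPath = pkgs.firefox.outPath;
}
```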
Although we're avoiding fine-grained optimization until we're sure Tvix
evaluates all of `nixpkgs` correctly, we still want to have an idea about
evaluation performance and how our work affects it over time.
For this we extended our benchmark suite and integrated it with
[Windtunnel][windtunnel], which now regularly runs benchmarks and provides a
view into how the timings change from commit to commit.
In the future, we plan to run this as a part of code review, before changes are
applied to our canonical branch, to provide this as an additional signal to
authors and reviewers without having to run the benchmarks manually.
## ATerms, output path calculation, and `builtins.derivation`
We've implemented all of these features, which comprise the components needed to
construct derivations in the Nix language, and to allow us to perform the path
comparisons we mentioned before.
As an interesting side note, in C++ Nix `builtins.derivation` is not actually a
builtin! It is a piece of [bundled Nix code][nixcpp-builtins-derivation], that
massages some parameters and then calls the *actual* builtin:
`derivationStrict`. We've decided to keep this setup, and implemented support in
Tvix to have builtins defined in `.nix` source code.
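
To illustrate the shape of this (a heavily simplified sketch, *not* the actual
`derivation.nix` bundled with C++ Nix or Tvix), such a Nix-level wrapper roughly
looks like this:

```nix
# Massage the user-supplied arguments, call the *actual* builtin, and
# graft the resulting drvPath/outPath back onto the user-facing
# attribute set. The real wrapper handles multiple outputs, meta
# attributes and more.
drvAttrs@{ outputs ? [ "out" ], ... }:
let
  strict = builtins.derivationStrict drvAttrs;

  mkOutput = outputName: drvAttrs // {
    type = "derivation";
    inherit outputName;
    drvPath = strict.drvPath;
    outPath = builtins.getAttr outputName strict;
  };
in
mkOutput (builtins.head outputs)
```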
These builtins return attribute sets with the previously mentioned `outPath` and
`drvPath` fields. Implementing them correctly meant that we needed to implement
output path calculation *exactly* the same way as Nix does (bit-by-bit).
Very little of how this output path calculation works is documented anywhere in
C++ Nix. It uses a subset of [ATerm][aterm] internally, produces "fingerprints"
containing hashes of these ATerms, which are then hashed again. The intermediate
hashes are not printed out anywhere (except if you [patch
Nix][nixcpp-patch-hashes] to do so).
We already did parts of this correctly while starting this work on
[go-nix][go-nix-outpath] some while ago, but found some more edge cases and
ultimately came up with a nicer interface for Tvix.
The entire internal Derivation data model, ATerm serialization and output path
calculation have been sliced out into a more general-purpose
[nix-compat][nix-compat-derivation] crate, along with more documentation, unit
tests and a Derivation ATerm parser, so hopefully this will now be more
accessible for everyone.
Note our builtin does *not* yet persist the Derivation anywhere "on
disk" (though we have a debug CL that does write it to a temporary directory,
in case we want to track down differences).
## `tvix-[ca]store`
Tvix now has a store implementation!
### The Nix model
Inside Nix, store path contents are normally hashed and communicated in NAR
format, which is very coarse and often wasteful - a single bit of change in one
file in a large store path causes a new NAR file to be uploaded to the binary
cache, which then needs to be downloaded.
Additionally, identifying everything by the SHA256 digest of its NAR
representation makes Nix store paths very incompatible with other
content-addressed systems, as it's a very Nix-specific format.
### The more granular Tvix model
After experimenting with some concepts and ideas in Golang, mostly around how to
improve binary cache performance[^3], both on-disk as well as over the network,
we settled on a more granular, content-addressed and general-purpose format.
Internally, it behaves very similarly to how git handles tree objects, except
blobs are identified by their raw BLAKE3 digests rather than some custom
encoding, and similarly, tree/directory objects use the BLAKE3 digest of their
canonical protobuf serialization as their identifier.
This provides some immediate benefits:
- We only need to keep the same data once, even if it's used across different
store paths.
- Transfers can be more granular and only need to fetch the data that's
needed. Due to everything being content-addressed, it can be fetched from
anything supporting BLAKE3 digests, immediately making it compatible with
other P2P systems (IPFS blake3 blobs, …), or general-purpose
content-addressed caches ([bazel-remote]).
There's a lot more details about the data model, certain decisions etc. in
[the docs][castore-docs].
### Compatibility
However, we still want to stay compatible with Nix: calculating
"NAR-addressed" store paths the same way, supporting substitution from regular
Nix binary caches, as well as storing all the other additional metadata about
store paths.
We accomplished this by splitting the two different concerns into two separate
`tvix-store` and `tvix-castore` crates, with the former one holding all
Nix-specific metadata and functionality, and the latter being a general-purpose
content-addressed blob and filesystem tree storage system, which is usable in a
lot of contexts outside of Tvix too. For example, if you want to use
tvix-castore to write your own git alternative, or provide granular and
authenticated access into large scientific datasets, you could!
### Backends
In addition to a gRPC API and client bindings, there's support for local
filesystem-based backends, as well as for sled, an embedded K/V database.
We're also currently working on a backend supporting most common object
storages, as well as on more granular seeking and content-defined chunking for
blobs.
### FUSE/virtiofs
A tvix-store can be mounted via FUSE, or exposed through virtiofs[^4].
Besides doing the obvious thing - allowing mounting and browsing the contents
of the store - this will allow lazy substitution of builds on remote builders,
be it in containerized or virtualized workloads.
We have an [example][tvix-boot-readme] in the repository seeding gnu hello into
a throwaway store, then booting a MicroVM and executing it.
### nar-bridge, bridging binary caches
`nar-bridge` and the `NixHTTPPathInfoService` bridge `tvix-[ca]store` with
existing Nix binary caches and Nix.
The former exposes a `tvix-[ca]store` over the common Nix HTTP Binary Cache
interface (both read and write).
The latter allows Tvix to substitute from regular Nix HTTP Binary caches,
unpacking NARs and ingesting them on-the-fly into the castore model.
The necessary parsers for NARInfo, signatures etc are also available in the
[nix-compat crate][nix-compat-narinfo].
## EvalIO / builtins interacting with the store more closely
tvix-eval itself is designed to be quite pure when it comes to IO - it doesn't
do any IO directly on its own, but for the very little IO functionality it
does as part of "basic interaction with paths" (like importing other
`.nix` files), it goes through an `EvalIO` interface, which is provided to the
Evaluator struct on instantiation.
This allows us to be a bit more flexible with what IO looks like in practice,
which becomes interesting for specific store implementations that might not
expose a POSIX filesystem directly, or targets where we don't have a filesystem
at all (like WASM).
Using the `EvalIO` trait also lets `tvix-eval` avoid becoming too strongly
coupled to a specific store implementation, hashing scheme etc[^2]. As we can
extend the set of builtins available to the evaluator with "foreign builtins",
these can live in other crates.
Following this pattern, we started implementing some of the "basic" builtins
that deal with path access in `tvix-eval`, like:
- `builtins.pathExists`
- `builtins.readFile`
We also recently started working on more complicated builtins like
`builtins.filterSource` and `builtins.path`, which are also used in `nixpkgs`.
Both import a path into the store, and allow passing a Nix expression that's
used as a filter function for each path. `builtins.path` can also ensure that
the imported contents match a certain hash.
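
For reference, this is what the two builtins look like from the Nix side
(standard Nix usage, shown only because it is the behaviour Tvix has to
reproduce):

```nix
{
  # Import ./. into the store, but skip the .git directory.
  filtered = builtins.filterSource
    (path: type: !(type == "directory" && baseNameOf path == ".git"))
    ./.;

  # Same idea, with a custom store path name and an optional expected
  # content hash.
  pinned = builtins.path {
    path = ./.;
    name = "my-sources";
    filter = path: type: baseNameOf path != ".git";
    # sha256 = "...";  # uncomment to pin the imported contents
  };
}
```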
This required the builtin to interact with the store and evaluator in a very
tight fashion, as the filter function (written in Nix) needs to be repeatedly
executed for each path, and its return value is able to cause the store to skip
over certain paths (which it previously couldn't).
Getting the abstractions right there required some back-and-forth, but the
remaining changes should land quite soon.
## Catchables / tryEval
Nix has a limited exception system for dealing with user-generated errors:
`builtins.tryEval` can be used to detect if an expression fails (if
`builtins.throw` or `assert` are used to generate it). This feature requires
extra support in any Nix implementation, as errors may not necessarily cause the
Nix program to abort.
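
As a quick reminder of the user-facing behaviour this has to reproduce
(standard Nix semantics, nothing Tvix-specific):

```nix
# tryEval catches "recoverable" failures like throw and failed asserts;
# the failing value is replaced by `false` rather than propagated.
{
  caught   = builtins.tryEval (throw "nope");     # { success = false; value = false; }
  passed   = builtins.tryEval "fine";             # { success = true;  value = "fine"; }
  asserted = builtins.tryEval (assert false; 1);  # { success = false; value = false; }
}
```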
The C++ Nix implementation reuses the C++ language-provided Exception system for
`builtins.tryEval` which Tvix can't (even if Rust had an equivalent system):
In C++ Nix the runtime representation of the program in execution corresponds
to the Nix expression tree of the relevant source files. This means that an
exception raised in C++ code will automatically bubble up correctly since the
C++ and Nix call stacks are equivalent to each other.
Tvix compiles the Nix expressions to a byte code program which may be mutated by
extra optimization rules (for example, we hope to eliminate as many thunks as
possible in the future). This means that such a correspondence between the state
of the runtime and the original Nix code is not guaranteed.
Previously, `builtins.tryEval` (which is implemented in Rust and can access VM
internals) just allowed the VM to recover from certain kinds of errors. This
proved to be insufficient, as it [blew up as soon as a `builtins.tryEval`-ed
thunk was forced again][tryeval-infrec]; extra bookkeeping was needed. As a
solution, we now store recoverable errors as a separate runtime value type.
As you can imagine, storing evaluation failures as "normal" values quickly leads
to all sorts of bugs because most VM/builtins code is written with only ordinary
values like attribute sets, strings etc. in mind.
While ironing those out, we made sure to supplement those fixes with as many
test cases for `builtins.tryEval` as possible. This will hopefully prevent any
regressions if or rather when we touch this system again. We already have some
ideas for replacing the `Catchable` value type with a cleaner representation,
but first we want to pin down all the unspoken behaviour.
## String contexts
For a long time, we had the [working theory][refscan-string-contexts] that we
could get away with not implementing string contexts, and instead do reference
scanning on a set of "known paths" (and not implement
`builtins.unsafeDiscardStringContext`).
Unfortunately, we discovered that while this is *conceptually* true, due to a
[bug in Nix][string-contexts-nix-bug] that's worked around in the
`stdenv.mkDerivation` implementation, we can't currently do this and calculate
the same hashes.
Because hash compatibility is important for us at this point, we bit the bullet
and added support for string contexts into our `NixString` implementation,
implemented the context-related builtins, and added more unit tests that verify
string context behaviour of various builtins.
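
A small example of the behaviour those tests pin down (plain Nix; the
derivation attributes here are just placeholders):

```nix
# Interpolating a derivation into a string leaves a context entry on
# the resulting string, which the context builtins can observe and
# which unsafeDiscardStringContext drops again.
let
  drv = builtins.derivation {
    name = "demo";
    system = "x86_64-linux";
    builder = "/bin/sh";
    args = [ "-c" "echo hi > $out" ];
  };
  s = "prefix ${drv} suffix";
in
{
  withContext    = builtins.getContext s;  # non-empty attribute set
  withoutContext = builtins.getContext (builtins.unsafeDiscardStringContext s);  # { }
}
```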
## Strings as byte strings
C++ Nix uses C-style zero-terminated strings internally - however, until
recently, Tvix has used standard Rust strings for string values. Since those are
required to be valid UTF-8, we haven't been able to properly represent all the
string values that Nix supports.
We recently converted our internal representation to byte strings, which allows
us to treat a `Vec<u8>` as a "string-like" value.
## JSON/TOML/XML
We added support for the `toJSON`, `toXML`, `fromJSON` and `fromTOML` builtins.
`toXML` is particularly exciting, as it's the only format that allows expressing
(partially applied) functions. It's also used in some of Nix' own test suite, so
we can now include these in our unit test suite (and pass, yay!).
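
A tiny example of why `toXML` is special (standard Nix; `toJSON` would refuse
the function value):

```nix
# Functions have no JSON counterpart, but toXML serialises them as
# <function>/<attrspat> elements describing their formal arguments.
builtins.toXML {
  greeting = "hello";
  f = { name, value ? null }: name;
}
```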
## Builder protocol, drv->builder
We've been working on the builder protocol, and Tvix's internal build
representation.
Nix uses derivations (encoded in ATerm) as nodes in its build graph, but it
refers to the other store paths used in a build by those store paths *only*.
As mentioned before, store paths only address the inputs - not the content.
This poses a big problem in Nix as soon as builds are scheduled on remote
builders: There is no guarantee that files at the same store path on the remote
builder actually have the same contents as on the machine orchestrating the
build. If a package is not binary reproducible, this can lead to so-called
[frankenbuilds][frankenbuild].
This also introduces a dependency on the state that's present on the remote
builder machine: Whatever is in its store and matches the paths will be used,
even if it was maliciously placed there.
To eliminate this hermiticity problem and increase the integrity of builds,
we've decided to use content-addressing in the builder protocol.
We're currently hacking on this at [Thaigersprint](https://thaigersprint.org/)
and might have some more news to share soon!
--------------
That's it for now, try out Tvix and hit us up on IRC or on our mailing list if
you run into any snags, or have any questions.
เจอกันนะ :)
[^1]: We know that we calculated all dependencies correctly because of how their
hashes are included in the hashes of their dependents, and so on. More on
path calculation and input-addressed paths in the next section!
[^2]: That's the same reason why `builtins.derivation[Strict]` also lives in
`tvix-glue`, not in `tvix-eval`.
[^3]: See [nix-casync](https://discourse.nixos.org/t/nix-casync-a-more-efficient-way-to-store-and-substitute-nix-store-paths/16539)
for one example - investigating content-defined chunking (while still keeping
the NAR format).
[^4]: Strictly speaking, not limited to tvix-store - literally anything
providing a listing into tvix-castore nodes.
[Tvix]: https://tvix.dev
[aterm]: http://program-transformation.org/Tools/ATermFormat.html
[bazel-remote]: https://github.com/buchgr/bazel-remote/pull/715
[castore-docs]: https://code.tvl.fyi/tree/tvix/docs/src/castore
[frankenbuild]: https://blog.layus.be/posts/2021-06-25-frankenbuilds.html
[go-nix-outpath]: https://github.com/nix-community/go-nix/blob/93cb24a868562714f1691840e94d54ef57bc0a5a/pkg/derivation/hashes.go#L52
[nix-compat-derivation]: https://docs.tvix.dev/rust/nix_compat/derivation/struct.Derivation.html
[nix-compat-narinfo]: https://docs.tvix.dev/rust/nix_compat/narinfo/index.html
[nix-dev-dialogues-tvix]: https://www.youtube.com/watch?v=ZYG3T4l8RU8
[nixcon2023]: https://www.youtube.com/watch?v=j67prAPYScY
[tvix-eval-ru]: https://tazj.in/blog/tvix-eval-talk-2023
[nixcpp-builtins-derivation]: https://github.com/NixOS/nix/blob/49cf090cb2f51d6935756a6cf94d568cab063f81/src/libexpr/primops/derivation.nix#L4
[nixcpp-patch-hashes]: https://github.com/adisbladis/nix/tree/hash-tracing
[refscan-string-contexts]: https://inbox.tvl.su/depot/20230316120039.j4fkp3puzrtbjcpi@tp/T/#t
[store-docs]: https://code.tvl.fyi/about/tvix/docs/src/store/api.md
[string-contexts-nix-bug]: https://github.com/NixOS/nix/issues/4629
[tryeval-infrec]: https://b.tvl.fyi/issues/281
[tvix-boot-readme]: https://code.tvl.fyi/about/tvix/boot/README.md
[why-string-contexts-now]: https://cl.tvl.fyi/c/depot/+/10446/7/tvix/eval/docs/build-references.md
[windtunnel]: https://staging.windtunnel.ci/tvl/tvix


@ -1,266 +0,0 @@
It's already been around half a year since
[the last Tvix update][2024-02-tvix-update], so time for another one!
Note: This blog post is intended for a technical audience that is already
intimately familiar with Nix, and knows what things like derivations or store
paths are. If you're new to Nix, this will not make a lot of sense to you!
## Builds
A long-term goal is obviously to be able to use the expressions in nixpkgs to
build things with Tvix. We made progress in many places towards that goal:
### Drive builds on IO
As already explained in our [first blog post][blog-rewriting-nix], in Tvix, we
want to make IFD a first-class citizen without significant perf cost.
Nix tries hard to split Evaluation and Building into two phases, visible in
the `nix-instantiate` command which produces `.drv` files in `/nix/store` and
the `nix-build` command which can be invoked on such `.drv` files without
evaluation.
Scheduling (like in Hydra) usually happens by walking the graph of `.drv` files
produced in the first phase.
As soon as there's some IFD along the path, everything until then gets built in
the Evaluator (which is why IFD is prohibited in nixpkgs).
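
For readers who haven't run into it, IFD simply means importing something that
first has to be built - a toy example (ours, not from nixpkgs):

```nix
# Evaluating `import genList` forces a *build* of gen-list in the
# middle of evaluation; that is "import from derivation".
{ pkgs ? import <nixpkgs> { } }:
let
  genList = pkgs.runCommand "gen-list" { } ''
    echo '[ "a" "b" "c" ]' > $out
  '';
in
import genList
```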
Tvix does not have two separate "phases" in a build, only a graph of unfinished
Derivations/Builds and their associated store paths. This graph does not need
to be written to disk, and can grow during runtime, as new Derivations with new
output paths are discovered.
Build scheduling happens continuously with that graph, for everything that's
really needed, when it's needed.
We do this by only "forcing" the realization of a specific store path if the
user ultimately wants that specific result to be available on their system, and
transitively, if something else wants it. This includes IFD in a very elegant
way.
We want to play with this approach as we continue on bringing our build
infrastructure up.
### Fetchers
There are a few Nix builtins that allow describing a fetch (be it the download
of a file from the internet, or the clone of a git repo). These needed to be implemented
for completeness. We implemented pretty much all downloads of Tarballs, NARs and
plain files, except git repositories, which are left for later.
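
Concretely, these are builtins like the following (plain Nix; inside Tvix the
first two now become deferred fetches, while git fetches are still left for
later):

```nix
{
  # Download a single file into the store.
  plainFile = builtins.fetchurl "https://example.com/release.tar.gz";

  # Download and unpack a tarball.
  unpacked = builtins.fetchTarball {
    url = "https://example.com/release.tar.gz";
    # sha256 = "...";  # optionally pin the result
  };

  # Clone a git repository - not implemented in Tvix yet.
  repo = builtins.fetchGit { url = "https://example.com/repo.git"; };
}
```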
Instead of doing these fetches immediately, we added a generic `Fetch` type
that allows describing such fetches *before actually doing them*, similar to
being able to describe builds, and use the same "Drive builds on IO" machinery
to delay these fetches to the point where it's needed. We also show progress
bars when doing fetches.
Very early, during bootstrapping, nixpkgs relies on some `builtin:fetchurl`
"fake" Derivation, which has some special handling logic in Nix. We implemented
these quirks, by converting it to instances of our `Fetch` type and dealing with
it there in a consistent fashion.
### More fixes, Refscan
With the above work done, and after fixing some small bugs [^3], we were already
able to build a first few store paths with Tvix and our `runc`-based builder
🎉!
We didn't get too far though, as we still need to implement reference scanning,
so that's next on our TODO list. Stay tuned for further updates there!
## Eval correctness & Performance
As already written in the previous update, we've been evaluating parts of
`nixpkgs` and ensuring we produce the same derivations. We managed to find and
fix some correctness issues there.
Even though we don't want to focus too much on performance improvements
until all features of Nix are properly understood and representable with our
architecture, there's been some work on removing some obvious and low-risk
performance bottlenecks. Expect a detailed blog post around that soon after
this one!
## Tracing / O11Y Support
Tvix got support for Tracing, and is able to emit spans in
[OpenTelemetry][opentelemetry]-compatible format.
This means, if the necessary tooling is set up to collect such spans [^1], it's
possible to see what's happening inside the different components of Tvix across
process (and machine) boundaries.
Tvix now also propagates trace IDs via gRPC and HTTP requests [^2], and
continues traces when it receives such IDs on incoming requests.
As an example, this allows us to get "callgraphs" on how a tvix-store operation
is processed through a multi-node deployment, and find bottlenecks and places to
optimize performance for.
Currently, this is compiled in by default, trying to send traces to an endpoint
at `localhost` (as per the official [SDK defaults][otlp-sdk]). It can
be disabled by building without the `otlp` feature, or running with the
`--otlp=false` CLI flag.
This piggy-backs on the excellent [tracing][tracing-rs] crate, which we already
use for structured logging, so while at it, we improved some log messages and
fields to make it easier to filter for certain types of events.
We also added support for sending out [Tracy][tracy] traces, though these are
disabled by default.
Additionally, some CLI entrypoints can now report progress to the user!
For example, when we're fetching something during evaluation
(via `builtins.fetchurl`), or uploading store path contents, we can report on
this. See [here][asciinema-import] for an example.
We're still considering these outputs as early prototypes, and will refine them as
we go.
## tvix-castore ingestion generalization
We spent some time refactoring and generalizing tvix-castore importer code.
It's now generalized on a stream of "ingestion entries" produced in a certain
order, and there's various producers of this stream (reading through the local
filesystem, reading through a NAR, reading through a tarball, soon: traversing
contents of a git repo, …).
This prevented a lot of code duplication for these various formats, and allows
pulling out helper code for concurrent blob uploading.
## More tvix-[ca]store backends
We added some more store backends to Tvix:
- There's a [redb][redb] `PathInfoService` and `DirectoryService`, which
also replaced the previous `sled` default backend.
- There's a [bigtable][bigtable] `PathInfoService` and `DirectoryService`
backend.
- The "simplefs" `BlobService` has been removed, as it can be expressed using
the "objectstore" backend with a `file://` URI.
- There's been some work on feature-flagging certain backends.
## Documentation reconciliation
Various bits and pieces of documentation had previously been scattered
throughout the Tvix codebase, which made them hard to find and quite confusing.
These have been consolidated into an mdbook (at `//tvix/docs`).
We plan to properly host these as a website, hopefully providing a better introduction
and overview of Tvix, while adding more content over time.
## `nar-bridge` RIIR
While the golang implementation of `nar-bridge` did serve us well for a while,
it being the only remaining non-Rust part was a bit annoying.
Adding some features there meant they would not be accessible in the rest of
Tvix - and the other way round.
Also, we could not open data stores directly from there, but always had to start
a separate `tvix-store daemon`.
The initial plans for the Rust rewrite were already made quite a while ago,
but we finally managed to finish implementing the remaining bits. `nar-bridge`
is now fully written in Rust, providing the same CLI experience, features and
store backends as the rest of Tvix.
## `crate2nix` and overall rust Nix improvements
We landed some fixes in [crate2nix][crate2nix], the tool we're using for
per-crate incremental builds of Tvix.
It now supports the corner cases needed to build WASM - so now
[Tvixbolt][tvixbolt] is built with it, too.
We also fixed some bugs in how test directories are prepared, which unlocked
running some more tests for filesystem related builtins such as `readDir` in our test suite.
Additionally, there have been some general improvements around ensuring various
combinations of Tvix feature flags build (now continuously checked by CI), and
reducing the amount of unnecessary rebuilds, by filtering non-sourcecode files
before building.
These should all improve DX while working on Tvix.
## Store Composition
Another big missing feature that landed was Store Composition. We briefly spoke
about the Tvix Store Model in the last update, but we didn't go into too much
detail on how that'd work in case there are multiple potential sources for a
store path or some more granular contents (which is pretty much always the case -
think of using things from your local store first and then falling back to
a remote place).
Nix has the default model of using `/nix/store` with a sqlite database for
metadata as a local store, and one or multiple "substituters" using the Nix HTTP
Binary Cache protocol.
In Tvix, things need to be a bit more flexible:
- You might be in a setting where you don't have a local `/nix/store` at all.
- You might want to have a view of different substituters/binary caches for
different users.
- You might want to explicitly specify caches in between some of these layers,
and control their config.
The idea in Tvix is that you'll be able to combine "hierarchies of stores" through
runtime configuration to express all this.
It's currently behind an `xp-store-composition` feature flag, which adds the
optional `--experimental-store-composition` CLI arg, pointing to a TOML file
specifying the composition configuration. If set, this has priority over the old
CLI args for the three (single) stores.
We're still not 100% sure how to best expose this functionality, in terms of the
appropriate level of granularity, in a user-friendly format.
There are also some more combinators and refactors missing, but please let us
know your thoughts!
## Contributors
There's been a lot of progress, which would not have been possible without our
contributors! Be it small drive-by contributions or large efforts, thank
you all!
- Adam Joseph
- Alice Carroll
- Aspen Smith
- Ben Webb
- binarycat
- Brian Olsen
- Connor Brewster
- Daniel Mendler
- edef
- Edwin Mackenzie-Owen
- espes
- Farid Zakaria
- Florian Klink
- Ilan Joselevich
- Luke Granger-Brown
- Markus Rudy
- Matthew Tromp
- Moritz Sanft
- Padraic-O-Mhuiris
- Peter Kolloch
- Picnoir
- Profpatsch
- Ryan Lahfa
- Simon Hauser
- sinavir
- sterni
- Steven Allen
- tcmal
- toastal
- Vincent Ambo
- Yureka
---
That's it again, try out Tvix and hit us up on IRC or on our mailing list if you
run into any snags, or have any questions.
[^1]: Essentially, deploying a collecting agent on your machines, accepting
these traces.
[^2]: Using the `traceparent` header field from https://www.w3.org/TR/trace-context/#trace-context-http-headers-format
[^3]: Like `builtins.toFile` not adding files yet, or `inputSources` being missed initially (duh!).
[2024-02-tvix-update]: https://tvl.fyi/blog/tvix-update-february-24
[opentelemetry]: https://opentelemetry.io/
[otlp-sdk]: https://opentelemetry.io/docs/languages/sdk-configuration/otlp-exporter/
[tracing-rs]: https://tracing.rs/
[tracy]: https://github.com/wolfpld/tracy
[asciinema-import]: https://asciinema.org/a/Fs4gKTFFpPGYVSna0xjTPGaNp
[blog-rewriting-nix]: https://tvl.fyi/blog/rewriting-nix
[crate2nix]: https://github.com/nix-community/crate2nix
[redb]: https://github.com/cberner/redb
[bigtable]: https://cloud.google.com/bigtable
[tvixbolt]: https://bolt.tvix.dev/


@ -1,42 +0,0 @@
{ depot, ... }:
{
  config = {
    name = "TVL's blog";
    footer = depot.web.tvl.footer { };
    baseUrl = "https://tvl.fyi/blog";
  };

  posts = builtins.sort (a: b: a.date > b.date) [
    {
      key = "rewriting-nix";
      title = "Tvix: We are rewriting Nix";
      date = 1638381387;
      content = ./rewriting-nix.md;
      author = "tazjin";
    }
    {
      key = "tvix-status-september-22";
      title = "Tvix Status - September '22";
      date = 1662995534;
      content = ./tvix-status-202209.md;
      author = "tazjin";
    }
    {
      key = "tvix-update-february-24";
      title = "Tvix Status - February '24";
      date = 1707472132;
      content = ./2024-02-tvix-update.md;
      author = "flokli";
    }
    {
      key = "tvix-update-august-24";
      title = "Tvix Status - August '24";
      date = 1723219370;
      content = ./2024-08-tvix-update.md;
      author = "flokli";
    }
  ];
}


@ -1,90 +0,0 @@
Evaluating the Nix programming language, used by the Nix package
manager, is currently very slow. This becomes apparent in all projects
written in Nix that are not just simple package definitions, for
example:
* the NixOS module system
* TVL projects like
[`//nix/yants`](https://at.tvl.fyi/?q=%2F%2Fnix%2Fyants) and
[`//web/bubblegum`](https://at.tvl.fyi/?q=%2F%2Fweb%2Fbubblegum).
* the code that [generates build
instructions](https://at.tvl.fyi/?q=%2F%2Fops%2Fpipelines) for TVL's
[CI setup](https://tvl.fyi/builds)
Whichever project you pick, they all suffer from issues with the
language implementation. At TVL, it takes us close to a minute to
create the CI instructions for our monorepo at the moment - despite it
being a plain Nix evaluation. Running our Nix-native build systems for
[Go](https://code.tvl.fyi/about/nix/buildGo) and [Common
Lisp](https://code.tvl.fyi/about/nix/buildLisp) takes much more time
than we would like.
Some time last year a few of us got together and started investigating
ways to modernise the current architecture of Nix and figure out how
to improve the speed of some of the components. We created over [250
commits](https://cl.tvl.fyi/q/topic:tvix) in our fork of the Nix 2.3
codebase at the time, tried [performance
experiments](https://cl.tvl.fyi/c/depot/+/1123/) aimed at improving
the current evaluator and fought [gnarly
bugs](https://cl.tvl.fyi/c/depot/+/1504).
After a while we realised that we were treading water: Some of our
ideas are too architecturally divergent from Nix to be done on top of
the existing codebase, and the memory model of Nix causes significant
headaches when trying to do any kind of larger change.
We needed an alternative approach and started brainstorming on a bent
whiteboard in a small flat in Hurghada, Egypt.
![flokli & tazjin brainstorming](https://static.tvl.fyi/latest/files/flokli_tazjin_tvix.webp)
Half a year later we are now ready to announce our new project:
**Tvix**, a re-imagined Nix with full nixpkgs compatibility. Tvix is
generously funded [by NLNet](https://nlnet.nl/project/Tvix/) (thanks!)
and we are ready to start implementing it.
The [Tvix
architecture](https://code.tvl.fyi/about/tvix/docs/components.md) is
designed to be modular: It should be possible to write an evaluator
that plugs in the Guile language (for compatibility with GNU Guix), to
use arbitrary builders, and to replace the store implementation.
Tvix has these high-level goals:
* Creating an alternative implementation of Nix that is **fully
compatible with nixpkgs**.
The package collection is an enormous effort with hundreds of
thousands of commits, encoding expert knowledge about lots of
different software and ways of building and managing it. It is a
very valuable piece of software and we must be able to reuse it.
* More efficient Nix language evaluation, leading to greatly increased
performance.
* No more strict separation of evaluation and build phases: Generating
Nix data structures from build artefacts ("IFD") should be supported
first-class and not incur significant performance cost.
* Well-defined interaction protocols for how the three different
components (evaluator, builder, store) interact.
* A builder implementation using OCI instead of custom sandboxing
code.
![adisbladis & tazjin brainstorming](https://static.tvl.fyi/latest/files/adisbladis_tazjin_tvix.webp)
Tvix is not intended to *replace* Nix, instead we want to improve the
ecosystem by offering an alternative, fast and reliable implementation
for Nix features that are in use today.
As things ramp up we will be posting more information on this blog,
for now you can keep an eye on
[`//tvix`](https://code.tvl.fyi/tree/tvix) in the TVL monorepo
and subscribe to [our feed](https://tvl.fyi/feed.atom).
Stay tuned!
<span style="font-size: small;">PS: TVL is international, but a lot of
the development will take place in our office in Moscow. Say hi if
you're around and interested!</span>


@ -1,165 +0,0 @@
We've now been working on our rewrite of Nix, [Tvix][], for over a
year.
As you can imagine, this past year has been turbulent, to say the
least, given the regions where many of us live. As a result we haven't
had as much time to work on fun things (like open-source software
projects!) as we'd like.
We've all been fortunate enough to continue making progress, but we
just haven't had the bandwidth to communicate with you and keep you up
to speed on what's going on. That's what this blog post is for.
## Nix language evaluator
The most significant progress in the past six months has been on our
Nix language evaluator. To answer the most important question: yes,
you can play with it right now in [Tvixbolt][]!
We got the evaluator into its current state by first listing all the
problems we were likely to encounter, then solving them independently,
and finally assembling all those small-scale solutions into a coherent
whole. As a result, we briefly had an impractically large private
source tree, which we have since [integrated][] into our monorepo.
This process was much slower than we would have liked, due to code
review bandwidth... which is to say, we're all volunteers. People have
lives, bottlenecks happen.
Most of this code was either written or reviewed by [grfn][],
[sterni][] and [tazjin][] (that's me!).
### How much of eval is working?
*Most of it*! You can enter most (but not *all*, sorry! Not yet,
anyway.) Nix language expressions in [Tvixbolt][] and observe how they
are evaluated.
There's a lot of interesting stuff going on under the hood, such as:
* The Tvix compiler can emit warnings and errors without failing
early, and retains as much source information as possible. This will
enable you to use Tvix as the basis for developer tooling, such as
language servers.
* The Tvix compiler performs in-depth scope analysis, so it can both
generate efficient bytecode for accessing identifiers, and alert you
about problems in your code before runtime.
* The runtime supports tail-call optimisation in many (but again
not yet all) cases, so you can evaluate recursive expressions in
constant stack space (see the small example after this list).
* The runtime can give you different backing representations for the
same Nix type. For example, an attribute set is represented
differently depending on whether you've constructed an empty one, a
`name/value` pair, or a larger set. This lets us optimise frequent,
well-known use-cases without impacting the general case much.
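
A small accumulator-style example of our own (not from the original
benchmarks) of the kind of recursion this targets; you can paste it into
[Tvixbolt][]:

```nix
# With tail calls optimised, this can run in constant stack space
# instead of growing the call stack by one frame per step.
let
  sum = acc: n: if n == 0 then acc else sum (acc + n) (n - 1);
in
sum 0 100000
```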
We've run some initial benchmarks against C++ Nix (using the features
that are ready), and in most cases Tvix evaluation is an order of
magnitude faster. To be fair, though, these benchmarks are in no way
indicative of real-life performance for things like `nixpkgs`. More
information is coming... eventually.
### How does it all work?
Tvix's evaluator uses a custom abstract machine with a Nix-specific
instruction set, and a compiler that traverses a parsed Nix AST to
emit this bytecode and perform a set of optimisations and other
analysis. The most important benefit of this is that we can plan and
lay out the execution of a program in a way that is better suited to
an efficient runtime than directly traversing the AST.
TIP: You can see the generated bytecode in [Tvixbolt][]!
This is all written in about 4000 lines of Rust (naturally), some of
which, especially around scope-handling, are deceptively simple.
As part of our CI suite, we run the evaluator against some tests we
wrote ourselves, as well as against the upstream Nix test suite (which
we don't *quite* pass yet. We're working on it!).
### What's next for tvix-eval?
Despite all our progress, there are still some unfinished feature
areas, and some of them are pretty important:
1. The majority of Nix's builtins, including fundamental ones like
`import` and `derivation`, aren't implemented yet.
2. Neither are recursive attribute sets (`rec`). This isn't because of
a problem with the recursion itself, but because of the handling of
nested keys (such as `a.b`). We have a lackluster solution already,
but are designing a more efficient one.
In both cases, we've mostly figured out what to do; now it's just a
matter of finding the time to do it. Our progress is steady, and can
be tracked [in the source][src] (viewer without Javascript
[here][src-noscript]).
Apart from that, the next steps are:
* Comprehensive benchmarking. We're standing up an infrastructure for
continuous benchmarking to measure the impact of changes. It'll also
let us identify and optimise hotspots.
* Implementing known optimisations. There are some areas of the code
that have the potential for significant speed gains, but we're
holding off implementing those until the evaluator is feature
complete and passes the Nix test suite.
* Finishing our language specification. Based on what we've learned,
we're writing a specification of the Nix language that captures its
various behaviours in all their tricky subtlety and subtle trickery.
Once we can evaluate `nixpkgs`, we're likely to shift our focus
towards the other areas of Tvix.
## The Other Areas of Tvix
Speaking of these other areas (most importantly, the builder and store
implementation), we've made some nice progress there also.
While we've yet to start assembling the actual pieces, [flokli][] and
[adisbladis][] have been hard at work on [go-nix][], which aims to
implement many of the low-level primitives required for the Nix store
and builder (hashing and encoding schemes, archive formats, reference
scanning ...).
We're looking forward to telling you more in the next Tvix status
update!
## Outro ...
We'd be delighted to onboard new contributors to Tvix! Please take a
look at the main [TVL page](https://tvl.fyi) to find out how to get in
touch with us if you'd like to join!
Thanks also, of course, to [NLNet](https://nlnet.nl/) for sponsoring
some of this work!
And finally, we would like to thank and pay our respects to jD91mZM2,
the original author of
[rnix-parser](https://github.com/nix-community/rnix-parser), who has
sadly passed away. Please, tell people how important they are to you.
We use `rnix-parser` in our compiler, and its well-designed internals
(also thanks to its new maintainers!) have saved us a lot of time.
That's it for this update. Go play with [Tvixbolt][], have fun
figuring out weird ways to break it and if you do, let us know.
We'll see you around!
[Tvix]: https://tvl.fyi/blog/rewriting-nix
[Tvixbolt]: https://bolt.tvix.dev
[integrated]: https://cl.tvl.fyi/q/status:merged+%2522tvix/eval%2522+mergedbefore:2022-09-09
[src]: https://code.tvl.fyi/tree/tvix/eval
[src-noscript]: https://code.tvl.fyi/tree/tvix/eval
[tazjin]: https://tazj.in
[grfn]: https://gws.fyi/
[sterni]: https://github.com/sternenseemann
[go-nix]: https://github.com/nix-community/go-nix
[flokli]: https://flokli.de/
[adisbladis]: https://github.com/adisbladis


@ -1,141 +0,0 @@
{ depot, lib, pkgs, ... }:
with depot.nix.yants;
let
inherit (builtins) filter;
inherit (pkgs) graphviz runCommand writeText;
inherit (depot.web) atom-feed blog tvl;
listPosts = defun [ (list blog.post) string ] (posts:
lib.concatStringsSep "\n" (map (p: "* [${p.title}](blog/${p.key})") posts)
);
postRenderingCommands = defun [ (list blog.post) string ] (posts:
lib.concatStringsSep "\n"
(map (p: "cp ${blog.renderPost tvl.blog.config p} $out/blog/${p.key}.html") posts)
);
tvlGraph = runCommand "tvl.svg"
{
nativeBuildInputs = with pkgs; [ fontconfig freetype cairo jetbrains-mono ];
} ''
${graphviz}/bin/neato -Tsvg ${./tvl.dot} > $out
'';
publishedPosts = filter blog.includePost tvl.blog.posts;
feed = {
id = "https://tvl.fyi/";
title = "TVL blog";
subtitle = "Thoughts and news from The Virus Lounge";
authors = [ "tazjin" ]; # TODO(tazjin): Extract from post info
links = lib.singleton {
rel = "self";
href = "https://tvl.fyi/feed.atom";
};
entries = map (blog.toFeedEntry tvl.blog.config) publishedPosts;
};
atomFeed = writeText "feed.atom" (atom-feed.renderFeed feed);
homepage = tvl.template {
title = "The Virus Lounge";
content = ''
The Virus Lounge
================
----------------
<img class="tvl-logo" src="https://static.tvl.fyi/${depot.web.static.drvHash}/logo-animated.svg"
alt="Virus with lambda-shaped spike proteins sitting on an armchair">
Welcome to **The Virus Lounge**. We're a group of people who got
together in 2020, when we felt that there was not enough
spontaneous socialising on the internet.
Because of our shared interests in topics like **build systems**
and **monorepos** we started working on code together, in our
monorepo called the *depot*.
Feel free to explore the tech we have built so far, all our
systems are linked in the footer.
----------------
## Blog
Here are the most recent TVL blog posts.
${listPosts publishedPosts}
You can also follow our [atom feed](https://tvl.fyi/feed.atom).
----------------
## Getting in touch
We mostly hang out on IRC. You can find us in [`#tvl`][tvl-irc]
on [hackint][], which is also reachable [via XMPP][hackint-xmpp]
at [`#tvl@irc.hackint.org`][tvl-xmpp] (sic!) and [via
Matrix][hackint-matrix] at [`#tvl:hackint.org`][tvl-matrix].
Hackint also provide a [web chat][tvl-webchat].
[tvl-irc]: ircs://irc.hackint.org:6697/#tvl
[hackint]: https://hackint.org/
[hackint-xmpp]: https://hackint.org/transport/xmpp
[tvl-xmpp]: xmpp:#tvl@irc.hackint.org?join
[hackint-matrix]: https://hackint.org/transport/matrix
[tvl-matrix]: https://matrix.to/#/#tvl:hackint.org
[tvl-webchat]: https://webirc.hackint.org/#ircs://irc.hackint.org/#tvl
Discussions of our software, patches, and anything else really
can also be sent to us via email to **depot@tvl.su**. You can
see the mails submitted to that list in our [public inbox][].
[public inbox]: https://inbox.tvl.su
----------------
## Where did all these people come from?
It's pretty straightforward. Feel free to click on people, too.
<div class="tvl-graph-container">
<!--
cheddar leaves HTML inside of HTML alone,
so wrapping the SVG prevents it from messing it up
-->
${builtins.readFile tvlGraph}
</div>
'';
extraHead = ''
<style>
.tvl-graph-container {
max-width: inherit;
}
.tvl-graph-container svg {
max-width: inherit;
height: auto;
}
.tvl-logo {
width: 60%;
display: block;
margin-left: auto;
margin-right: auto;
}
</style>
'';
};
in
runCommand "website" { } ''
mkdir -p $out/blog
cp ${homepage} $out/index.html
${postRenderingCommands tvl.blog.posts}
cp ${atomFeed} $out/feed.atom
''


@ -1,21 +0,0 @@
# Footer fragment for TVL homepages, used by //web/tvl/template for
# our static pages and also via //web/blog for blog posts.
{ lib, ... }:
args: ''
  <p class="footer">
    <a class="uncoloured-link" href="https://at.tvl.fyi/?q=%2F%2FREADME.md">code</a>
    |
    <a class="uncoloured-link" href="https://cl.tvl.fyi/">reviews</a>
    |
    <a class="uncoloured-link" href="https://tvl.fyi/builds">ci</a>
    |
    <a class="uncoloured-link" href="https://b.tvl.fyi/">bugs</a>
    |
    <a class="uncoloured-link" href="https://todo.tvl.fyi/">todos</a>
    |
    <a class="uncoloured-link" href="https://atward.tvl.fyi/">search</a>
'' + lib.optionalString (args ? extraFooter) args.extraFooter + ''
  </p>
  <p class="lod">_</p>
''


@ -1,97 +0,0 @@
# Creates an output containing the logo in SVG format (animated and
# static, one for each background colour) and without animations in
# PNG.
{ depot, lib, pkgs, ... }:
let
palette = {
purple = "#CC99C9";
blue = "#9EC1CF";
green = "#9EE09E";
yellow = "#FDFD97";
orange = "#FEB144";
red = "#FF6663";
};
staticCss = colour: ''
#armchair-background {
fill: ${colour};
}
'';
# Create an animated CSS that equally spreads out the colours over
# the animation duration (1min).
animatedCss = colours:
let
# Calculate at which percentage offset each colour should appear.
stepSize = 100 / ((builtins.length colours) - 1);
frames = lib.imap0 (idx: colour: { inherit colour; at = idx * stepSize; }) colours;
frameCss = frame: "${toString frame.at}% { fill: ${frame.colour}; }";
in
''
#armchair-background {
animation: 30s infinite alternate armchairPalette;
}
@keyframes armchairPalette {
${lib.concatStringsSep "\n" (map frameCss frames)}
}
'';
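# Worked example (illustrative comment, not part of the original file): for
# a call like animatedCss [ "#CC99C9" "#9EC1CF" "#9EE09E" ] the stepSize is
# 100 / 2 = 50, so the generated keyframes are
#   0% { fill: #CC99C9; }
#   50% { fill: #9EC1CF; }
#   100% { fill: #9EE09E; }
# i.e. the colours are spread evenly over the 30s animation, which then runs
# in reverse ("alternate") for a roughly one-minute full cycle.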
# Dark version of the logo, suitable for light backgrounds.
darkCss = armchairCss: ''
.structure {
fill: #383838;
}
#letters {
fill: #fefefe;
}
${armchairCss}
'';
# Light version, suitable for dark backgrounds.
lightCss = armchairCss: ''
.structure {
fill: #e4e4ef;
}
#letters {
fill: #181818;
}
${armchairCss}
'';
logoShapes = builtins.readFile ./logo-shapes.svg;
logoSvg = style: ''
<svg xmlns="http://www.w3.org/2000/svg" xml:space="preserve" viewBox="420 860 1640 1500"
xmlns:xlink="http://www.w3.org/1999/xlink">
<style>${style}</style>
${logoShapes}
</svg>
'';
in
depot.nix.readTree.drvTargets (lib.fix (self: {
# Expose the logo construction functions.
inherit palette darkCss lightCss animatedCss staticCss;
# Create a TVL logo SVG with the specified style.
logoSvg = style: pkgs.writeText "logo.svg" (logoSvg style);
# Create a PNG of the TVL logo with the specified style and DPI.
logoPng = style: dpi: pkgs.runCommand "logo.png" { } ''
${pkgs.inkscape}/bin/inkscape \
--export-area-drawing \
--export-background-opacity 0 \
--export-dpi ${toString dpi} \
${self.logoSvg style} -o $out
'';
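# Usage sketch (illustrative comment, not part of the original file; the
# readTree attribute path is an assumption): a static 192 DPI PNG on the
# blue background could be built as
#   let logo = depot.web.tvl.logo;
#   in logo.logoPng (logo.darkCss (logo.staticCss logo.palette.blue)) 192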
# Animated dark SVG logo with all colours.
pastelRainbow = self.logoSvg (darkCss (animatedCss (lib.attrValues palette)));
}
# Add individual outputs for static dark logos of each colour.
// (lib.mapAttrs'
(k: v: lib.nameValuePair "${k}Png"
(self.logoPng (darkCss (staticCss v)) 96))
palette)))
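# For reference (illustrative comment, not part of the original file): the
# mapAttrs' call above adds one static 96 DPI PNG drvTarget per palette
# colour, i.e. purplePng, bluePng, greenPng, yellowPng, orangePng and redPng.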


@ -1,27 +0,0 @@
<polygon id="armchair-background" points="463 2030 567 1814 1904 1814 1978 2030 1935 2169 1720 2155 1590 2311 873 2305 778 2142 570 2186"/>
<g class="structure">
<path id="virusbody" d="M 707.524,1820.74 701.038,1401.58 970,1100 h 542 l 271.37,310.47 -16.99,419.17 -295.93,284.34 -445.46,-4.15 z"/>
</g>
<g class="structure" id="lambdas">
<!-- virus lambdas and feet, clockwise starting at the top left -->
<path id="topleft" d="m 1002,1045 38,75 -65,35 -140,-260 h 78 l 47,80 45,-80 h 45 l 17.39,34.968" />
<use id="topright" xlink:href="#topleft" transform="matrix(-1,0,0,1,2482,0)" />
<use id="midright" xlink:href="#topleft" transform="matrix(-0.70710678,-0.70710678,-0.70710678,0.70710678,3284.799,1331.4128)" />
<use id="bottomright" xlink:href="#topleft" transform="matrix(-0.25881905,-0.96592583,-0.96592583,0.25881905,3120.6829,2438.0653)" />
<use id="rightfoot" xlink:href="#topleft" transform="matrix(-0.60515932,0.14752194,-0.14752194,-0.60515932,2234.5287,2616.7665)" />
<use id="leftfoot" xlink:href="#topleft" transform="matrix(0.60515932,0.14752194,0.14752194,-0.60515932,253.62404,2616.7665)" />
<use id="bottomleft" xlink:href="#topleft" transform="rotate(-75,1263.0635,1635.2798)" />
<use id="midleft" xlink:href="#topleft" transform="rotate(-45,1209.002,1626.9386)" />
</g>
<g class="structure" id="armchair">
<path d="M742.781 2172.23s-89.208 93.93-210.767 22.78c-121.56-71.14-124.755-220.09-47.72-318 78.865-100.24 220.899-86.94 221.229-85.38.274 1.3 247.178 196.08 328.597 260.28 16.08 12.68 25.71 20.27 25.71 20.27l-37.68 41.02s-209.519-177.76-290.729-250.45c-9.975 1.38-150.662-67.27-214.983 108.51-24.251 74.65 15.983 145.09 69.889 167.71 91.689 19.32 94.88 1.94 121.523-18.39"/>
<path d="M1738.4 2174.64s91.9 88.75 209.97 16.51c118.07-72.25 115.91-216.85 39.26-313.11-78.47-98.55-217.31-83.5-217.61-81.95-.26 1.29-239.43 197.97-318.3 262.8-15.58 12.8-24.9 20.46-24.9 20.46l37.4 40.26s202.73-184.66 281.29-257.92c9.78 1.23 134.36-50.54 211.78 110.07 28.32 92.64-13.71 144.64-66.18 167.81-89.5 20.38-90.29.61-116.63-19.24"/>
<path d="m899.02 2276.92 680.44-.32 98.56-134.61 51.64 32.46-121.94 160.78-739.1-1.03-125.507-162.22 54.172-39.79 101.735 144.73Z"/>
<path d="m744.143 2173.36 56.05-35.55s-44.914-79.17-102.074-8.6"/>
<path d="M1728.8 2176.06c-7.6 2.16-53.69-30.58-53.69-30.58s43.06-84.48 102.63-21.21c59.57 63.27-52.85 47.65-48.94 51.79Z"/>
</g>
<g id="letters" fill="#fefefe">
<path id="t" d="M970.081 1776.8c-22.214 0-40.017-6.45-53.41-19.35-13.394-12.9-20.09-30.14-20.09-51.7v-158.27h-75.95v-40.18h75.95v-75.95h44.1v75.95h107.799v40.18H940.681v158.27c0 9.15 2.695 16.58 8.085 22.3 5.39 5.72 12.495 8.57 21.315 8.57h73.499v40.18h-73.499Z"/>
<path id="v" d="M 1205.77 1776.8 L 1112.18 1507.3 L 1157.75 1507.3 L 1235.66 1742.99 L 1311.12 1507.3 L 1357.18 1507.3 L 1263.59 1776.8 L 1205.77 1776.8 L 1205.77 1776.8 Z"/>
<path id="lambda" d="M 1406.18 1776.8 L 1506.14 1511.71 L 1469.88 1419.1 L 1516.92 1419.1 L 1651.18 1776.8 L 1604.14 1776.8 L 1539.95 1601.87 L 1530.64 1571.49 L 1453.71 1776.8 L 1406.18 1776.8 Z"/>
</g>


@ -1,52 +0,0 @@
{ depot, pkgs, lib, ... }:
{
# content of the <title> tag
title
# main part of the page, usually wrapped with <main>
, content
# optional extra html to inject into <head>
, extraHead ? null
# optional extra html to inject into <footer>
, extraFooter ? null
# URL at which static assets are located
, staticUrl ? "https://static.tvl.fyi/${depot.web.static.drvHash}"
}@args:
let
inherit (pkgs) runCommand lib;
inherit (depot.tools) cheddar;
in
runCommand "${lib.strings.sanitizeDerivationName title}-index.html"
{
headerPart = ''
<!DOCTYPE html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="description" content="The Virus Lounge">
<link rel="stylesheet" type="text/css" href="${staticUrl}/tvl.css" media="all">
<link rel="icon" type="image/webp" href="${staticUrl}/favicon.webp">
<link rel="alternate" type="application/atom+xml" title="Atom Feed" href="https://tvl.fyi/feed.atom">
<title>${title}</title>
'' + lib.optionalString (args ? extraHead) extraHead + ''
</head>
<body class="light">
'';
inherit content;
footerPart = ''
<hr>
<footer>
${depot.web.tvl.footer args}
</footer>
</body>
'';
passAsFile = [ "headerPart" "content" "footerPart" ];
} ''
${cheddar}/bin/cheddar --about-filter content.md < $contentPath > rendered.html
cat $headerPartPath rendered.html $footerPartPath > $out
''
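# Usage sketch (illustrative comment, not part of the original file; the
# readTree attribute path is an assumption). Only title and content are
# required, the remaining arguments default as declared above:
#   depot.web.tvl.template {
#     title = "Example page";
#     content = ''
#       # Hello
#
#       This markdown is rendered through cheddar's about-filter.
#     '';
#   }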


@ -1,173 +0,0 @@
digraph tvl {
node [fontname = "JetBrains Mono"];
overlap = false;
splines = polyline;
TVL [style="bold" href="http://tvl.fyi"];
tazjin -> TVL [style="bold"];
// people
subgraph {
Irenes [href="https://www.pluralpride.com/"];
K900 [href="https://0upti.me/"];
Profpatsch [href="http://profpatsch.de/"];
adisbladis [href="http://nixos.expert/"];
amjoseph;
andi [label="andi-" href="https://andreas.rammhold.de/"];
aurora [href="https://nonegenderleftfox.aventine.se/"];
benjojo [href="https://benjojo.co.uk/"];
cynthia [href="https://cynthia.re/"];
edef [href="https://edef.eu/files/edef.hs"];
ericvolp [href="https://ericv.me"];
espes;
eta [href="https://theta.eu.org/"];
etu [href="https://elis.nu/"];
ezemtsov [href="https://github.com/ezemtsov"];
firefly [href="http://firefly.nu/"];
flokli [href="https://flokli.de/"];
fzakaria [href="https://fzakaria.com/"];
ghuntley [href="https://ghuntley.com/"];
aspen [href="http://gws.fyi"];
implr [href="https://twitter.com/implring"];
isomer [href="https://www.lorier.net/"];
j4m3s [href="https://github.com/j4m3s-s"];
jusrin [href="https://jusrin.dev/"];
kn;
lassulus;
leah2 [href="https://leahneukirchen.org/"];
lukegb [href="https://lukegb.com/"];
marcusr [href="http://marcus.nordaaker.com/"];
ncl;
nikky [href="http://nikky.moe/"];
nyanotech [href="https://twitter.com/nyanotech"];
seven [href="https://open.spotify.com/user/so7"];
sterni [href="https://sterni.lv/"];
tazjin [href="https://tazj.in/"];
wpcarro [href="https://wpcarro.dev/"];
raitobezarius [href="https://ryan.lahfa.xyz/"];
vova [href="https://github.com/vkryachko"];
yuuko;
}
// companies (blue)
subgraph {
node [color="#4285f4" fontcolor="#4285f4"];
spotify [href="https://www.spotify.com/"];
google [href="https://www.google.com/"];
}
// communities? (red)
subgraph {
node [color="#db4437" fontcolor="#db4437"];
eve [href="https://www.eveonline.com/"];
nix [href="https://nixos.org/nix/"];
tvix [href="https://code.tvl.fyi/tree/tvix"];
ircv3 [href="https://ircv3.net/"];
muccc [label="µccc" href="https://muc.ccc.de/"];
afra [label="AfRA" href="https://afra-berlin.de/"];
}
// special
subgraph {
baby [color="pink" fontcolor="pink" href="https://cynthia.re/s/baby"];
unspecific [color="grey" fontcolor="grey"];
}
// primary edges (how did they end up in TVL?)
subgraph {
// Direct edges
nix -> TVL;
tvix -> TVL;
spotify -> tazjin;
google -> tazjin;
eve -> tazjin;
unspecific -> tazjin;
edef -> tazjin;
ezemtsov -> tazjin;
// via nix
adisbladis -> nix;
jusrin -> nix;
ghuntley -> nix;
flokli -> nix;
andi -> nix;
Profpatsch -> nix;
lassulus -> nix;
etu -> nix;
vova -> nix;
// via tvix
j4m3s -> tvix;
amjoseph -> tvix;
K900 -> tvix;
// via edef
benjojo -> edef;
espes -> edef;
firefly -> edef;
leah2 -> aurora;
ncl -> edef;
// via spotify
seven -> spotify;
// via google
Irenes -> google;
isomer -> google;
lukegb -> google;
wpcarro -> google;
fzakaria -> google;
// random primary
aspen -> wpcarro;
aurora -> eve;
cynthia -> benjojo;
eta -> unspecific;
ericvolp -> lukegb;
marcusr -> unspecific;
implr -> lukegb;
afra -> unspecific;
nikky -> afra;
kn -> flokli;
sterni -> Profpatsch;
yuuko -> ncl;
raitobezarius -> flokli;
}
// secondary edges (how are they connected otherwise?)
subgraph {
edge [weight=0 style="dotted" color="grey" arrowhead="none"];
// ircv3
eta -> ircv3;
firefly -> ircv3;
raitobezarius -> ircv3;
// µccc
leah2 -> muccc;
// random
leah2 -> edef;
lukegb -> isomer;
eta -> firefly;
cynthia -> firefly;
cynthia -> lukegb;
implr -> google;
nyanotech -> google;
lukegb -> benjojo;
espes -> benjojo;
espes -> aurora;
aspen -> nix;
edef -> nix;
ezemtsov -> nix;
raitobezarius -> nix;
}
// baby
subgraph {
edge [weight=0 style="dotted" color="pink" arrowhead="none"];
cynthia -> baby;
eta -> baby;
}
}