chore(contrib): upgrade to 2024 edition

Part of #114
This edition change doesn't affect these crates' behavior; the diff is limited to import reordering/formatting and adding `use<>` precise-capture bounds on `impl Trait` return types (required by the 2024 edition's RPIT lifetime-capture rules).

Change-Id: I441c967b6803f1f839379c2ec13dee3f0958fb05
Reviewed-on: https://cl.snix.dev/c/snix/+/30593
Autosubmit: Bence Nemes <nemes.bence1@gmail.com>
Reviewed-by: Ilan Joselevich <personal@ilanjoselevich.com>
Reviewed-by: Florian Klink <flokli@flokli.de>
Tested-by: besadii
This commit is contained in:
Starnick4444 2025-07-01 08:28:28 +02:00 committed by clbot
parent d829261a09
commit f242bc6df8
27 changed files with 36 additions and 30 deletions

View file

@ -1581,7 +1581,7 @@ rec {
"crunch-v2" = rec {
crateName = "crunch-v2";
version = "0.1.0";
edition = "2021";
edition = "2024";
crateBin = [
{
name = "crunch-v2";

View file

@ -1,7 +1,7 @@
[package]
name = "crunch-v2"
version = "0.1.0"
edition = "2021"
edition = "2024"
[workspace]
members = ["."]

View file

@ -0,0 +1 @@
edition = "2024"

View file

@ -16,8 +16,8 @@ use prost::Message;
use polars::{
chunked_array::builder::AnonymousOwnedListBuilder,
prelude::{
df, BinaryChunkedBuilder, ChunkedBuilder, DataFrame, DataType, Field, ListBuilderTrait,
NamedFrom, ParquetWriter, PrimitiveChunkedBuilder, Series, UInt32Type,
BinaryChunkedBuilder, ChunkedBuilder, DataFrame, DataType, Field, ListBuilderTrait,
NamedFrom, ParquetWriter, PrimitiveChunkedBuilder, Series, UInt32Type, df,
},
series::IntoSeries,
};

View file

@ -15,7 +15,7 @@ mod remote;
use anyhow::Result;
use clap::Parser;
use futures::{stream, StreamExt, TryStreamExt};
use futures::{StreamExt, TryStreamExt, stream};
use indicatif::{ProgressBar, ProgressStyle};
use std::{
io::{self, BufRead, Read, Write},
@ -24,7 +24,7 @@ use std::{
};
use polars::{
prelude::{col, LazyFrame, ScanArgsParquet},
prelude::{LazyFrame, ScanArgsParquet, col},
sql::sql_expr,
};

View file

@ -5,16 +5,16 @@ use std::{
task::{self, Poll},
};
use anyhow::{bail, Result};
use anyhow::{Result, bail};
use bytes::{Buf, Bytes};
use futures::{future::BoxFuture, Future, FutureExt, Stream, StreamExt};
use futures::{Future, FutureExt, Stream, StreamExt, future::BoxFuture};
use lazy_static::lazy_static;
use tokio::runtime::Handle;
use nix_compat::nixbase32;
use rusoto_core::{ByteStream, Region};
use rusoto_s3::{GetObjectOutput, GetObjectRequest, S3Client, S3};
use rusoto_s3::{GetObjectOutput, GetObjectRequest, S3, S3Client};
use bzip2::read::BzDecoder;
use xz2::read::XzDecoder;
@ -106,7 +106,7 @@ impl FileKey {
&self,
offset: u64,
e_tag: Option<&str>,
) -> impl Future<Output = io::Result<GetObjectOutput>> + Send + 'static {
) -> impl Future<Output = io::Result<GetObjectOutput>> + Send + 'static + use<> {
let input = GetObjectRequest {
bucket: BUCKET.to_string(),
key: format!(

View file

@ -3682,7 +3682,7 @@ rec {
"fetchroots" = rec {
crateName = "fetchroots";
version = "0.0.0";
edition = "2021";
edition = "2024";
crateBin = [
{
name = "fetchroots";

View file

@ -1,7 +1,7 @@
[package]
name = "fetchroots"
version = "0.0.0"
edition = "2021"
edition = "2024"
[workspace]
members = ["."]

View file

@ -0,0 +1 @@
edition = "2024"

View file

@ -28,7 +28,7 @@ use nix_compat::nixbase32;
use polars::prelude::*;
use tokio::{
sync::Semaphore,
task::{block_in_place, JoinSet},
task::{JoinSet, block_in_place},
};
#[derive(Debug)]

View file

@ -2749,7 +2749,7 @@ rec {
"narinfo2parquet" = rec {
crateName = "narinfo2parquet";
version = "0.1.0";
edition = "2021";
edition = "2024";
crateBin = [
{
name = "narinfo2parquet";

View file

@ -1,7 +1,7 @@
[package]
name = "narinfo2parquet"
version = "0.1.0"
edition = "2021"
edition = "2024"
# We can't join the //tvix workspace, because that locks zstd
# at an ancient version, which is incompatible with polars

View file

@ -0,0 +1 @@
edition = "2024"

View file

@ -7,7 +7,7 @@
//! mkdir narinfo.pq && ls narinfo.zst | parallel --bar 'narinfo2parquet {}'
//! ```
use anyhow::{bail, Context, Result};
use anyhow::{Context, Result, bail};
use jemallocator::Jemalloc;
use nix_compat::{
narinfo::{self, NarInfo},

View file

@ -4193,7 +4193,7 @@ rec {
"snixbolt" = rec {
crateName = "snixbolt";
version = "0.1.0";
edition = "2021";
edition = "2024";
src = lib.cleanSourceWith { filter = sourceFilter; src = ./.; };
type = [ "cdylib" ];
dependencies = [

View file

@ -1,7 +1,7 @@
[package]
name = "snixbolt"
version = "0.1.0"
edition = "2021"
edition = "2024"
[lib]
crate-type = ["cdylib"]

View file

@ -0,0 +1 @@
edition = "2024"

View file

@ -11,8 +11,8 @@ use snix_eval::observer::{DisassemblingObserver, TracingObserver};
use wasm_bindgen::prelude::wasm_bindgen;
use web_sys::HtmlDetailsElement;
use web_sys::HtmlTextAreaElement;
use yew::prelude::*;
use yew::TargetCast;
use yew::prelude::*;
use yew_router::history::BrowserHistory;
use yew_router::history::History;

View file

@ -4348,7 +4348,7 @@ rec {
"turbofetch" = rec {
crateName = "turbofetch";
version = "0.1.0";
edition = "2021";
edition = "2024";
crateBin = [
{
name = "turbofetch";

View file

@ -1,7 +1,7 @@
[package]
name = "turbofetch"
version = "0.1.0"
edition = "2021"
edition = "2024"
# We don't join the //snix workspace, as this is fairly cache.nixos.org-specific.
[workspace]

View file

@ -0,0 +1 @@
edition = "2024"

View file

@ -21,9 +21,9 @@
//! write any output.
use bytes::Bytes;
use futures::{stream::FuturesUnordered, Stream, TryStreamExt};
use futures::{Stream, TryStreamExt, stream::FuturesUnordered};
use rusoto_core::ByteStream;
use rusoto_s3::{GetObjectRequest, PutObjectRequest, S3Client, S3};
use rusoto_s3::{GetObjectRequest, PutObjectRequest, S3, S3Client};
use serde::Deserialize;
use std::{io::Write, mem, ops::Range, ptr};
use tokio::{
@ -34,7 +34,7 @@ use tokio::{
/// Fetch a group of keys, streaming concatenated chunks as they arrive from S3.
/// `keys` must be a slice from the job file. Any network error at all fails the
/// entire batch, and there is no rate limiting.
fn fetch(keys: &[[u8; 32]]) -> impl Stream<Item = io::Result<Bytes>> {
fn fetch(keys: &[[u8; 32]]) -> impl Stream<Item = io::Result<Bytes>> + use<> {
// S3 supports only HTTP/1.1, but we can ease the pain somewhat by using
// HTTP pipelining. It terminates the TCP connection after receiving 100
// requests, so we chunk the keys up accordingly, and make one connection

View file

@ -7570,7 +7570,7 @@ rec {
"weave" = rec {
crateName = "weave";
version = "0.1.0";
edition = "2021";
edition = "2024";
crateBin = [
{
name = "swizzle";

View file

@ -1,7 +1,7 @@
[package]
name = "weave"
version = "0.1.0"
edition = "2021"
edition = "2024"
[workspace]
members = ["."]

View file

@ -0,0 +1 @@
edition = "2024"

View file

@ -33,13 +33,13 @@
use anyhow::Result;
use hashbrown::HashTable;
use polars::{
lazy::dsl::{col, SpecialEq},
lazy::dsl::{SpecialEq, col},
prelude::*,
};
use tracing::info_span;
use tracing_indicatif::span_ext::IndicatifSpanExt as _;
use weave::{as_fixed_binary, hash64, leak, load_ph_array, INDEX_NULL};
use weave::{INDEX_NULL, as_fixed_binary, hash64, leak, load_ph_array};
#[tracing::instrument]
fn main() -> Result<()> {

View file

@ -6,7 +6,7 @@
//! the row numbers in `narinfo.parquet` corresponding to live paths.
use anyhow::Result;
use hashbrown::{hash_table, HashTable};
use hashbrown::{HashTable, hash_table};
use rayon::prelude::*;
use rustc_hash::FxHashSet;
use std::{
@ -25,7 +25,7 @@ use polars::{
prelude::*,
};
use weave::{as_fixed_binary, hash64, INDEX_NULL};
use weave::{INDEX_NULL, as_fixed_binary, hash64};
#[tracing::instrument]
fn main() -> Result<()> {