Compare commits

2 commits

Author SHA1 Message Date
Cole Mickens 4631601e5d send and wait for dnixd flush signal 2025-04-01 13:01:57 -07:00
Cole Mickens cf00f851e1 improve unauthenticated error message 2025-03-26 11:03:21 -07:00
10 changed files with 593 additions and 818 deletions

View file

@@ -1,20 +0,0 @@
name: update-flake-lock
on:
workflow_dispatch: # enable manual triggering
schedule:
- cron: "0 0 * * 0" # every Sunday at midnight
jobs:
lockfile:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: DeterminateSystems/nix-installer-action@main
- uses: DeterminateSystems/flakehub-cache-action@main
- uses: DeterminateSystems/update-flake-lock@main
with:
pr-title: Update flake.lock
pr-labels: |
dependencies
automated

Cargo.lock (generated): 1258 changed lines; file diff suppressed because it is too large.

View file

@@ -2,16 +2,17 @@
"nodes": {
"crane": {
"locked": {
"lastModified": 1741479724,
"narHash": "sha256-fnyETBKSVRa5abjOiRG/IAzKZq5yX8U6oRrHstPl4VM=",
"rev": "60202a2e3597a3d91f5e791aab03f45470a738b5",
"revCount": 709,
"type": "tarball",
"url": "https://api.flakehub.com/f/pinned/ipetkov/crane/0.20.2/0195784b-915b-7d2d-915d-ab02d1112ef9/source.tar.gz"
"lastModified": 1742394900,
"narHash": "sha256-vVOAp9ahvnU+fQoKd4SEXB2JG2wbENkpqcwlkIXgUC0=",
"owner": "ipetkov",
"repo": "crane",
"rev": "70947c1908108c0c551ddfd73d4f750ff2ea67cd",
"type": "github"
},
"original": {
"type": "tarball",
"url": "https://flakehub.com/f/ipetkov/crane/%2A"
"owner": "ipetkov",
"repo": "crane",
"type": "github"
}
},
"flake-compat": {
@@ -101,7 +102,7 @@
},
"original": {
"type": "tarball",
"url": "https://flakehub.com/f/NixOS/nix/2"
"url": "https://flakehub.com/f/NixOS/nix/2.tar.gz"
}
},
"nixpkgs": {
@@ -163,7 +164,7 @@
},
"original": {
"type": "tarball",
"url": "https://flakehub.com/f/NixOS/nixpkgs/0.1"
"url": "https://flakehub.com/f/NixOS/nixpkgs/0.1.tar.gz"
}
},
"root": {

View file

@@ -2,14 +2,16 @@
description = "GitHub Actions-powered Nix binary cache";
inputs = {
nixpkgs.url = "https://flakehub.com/f/NixOS/nixpkgs/0.1";
nixpkgs.url = "https://flakehub.com/f/NixOS/nixpkgs/0.1.tar.gz";
crane.url = "https://flakehub.com/f/ipetkov/crane/*";
# Pinned to `master` until a release containing
# <https://github.com/ipetkov/crane/pull/792> is cut.
crane.url = "github:ipetkov/crane";
nix.url = "https://flakehub.com/f/NixOS/nix/2";
nix.url = "https://flakehub.com/f/NixOS/nix/2.tar.gz";
};
outputs = inputs:
outputs = { self, nixpkgs, crane, ... }@inputs:
let
supportedSystems = [
"aarch64-linux"
@@ -18,13 +20,14 @@
"x86_64-darwin"
];
forEachSupportedSystem = f: inputs.nixpkgs.lib.genAttrs supportedSystems (system: f rec {
pkgs = import inputs.nixpkgs {
forEachSupportedSystem = f: nixpkgs.lib.genAttrs supportedSystems (system: f rec {
pkgs = import nixpkgs {
inherit system;
overlays = [
inputs.self.overlays.default
self.overlays.default
];
};
inherit (pkgs) lib;
inherit system;
});
in
@@ -32,14 +35,14 @@
overlays.default = final: prev:
let
craneLib = inputs.crane.mkLib final;
craneLib = crane.mkLib final;
crateName = craneLib.crateNameFromCargoToml {
cargoToml = ./magic-nix-cache/Cargo.toml;
};
commonArgs = {
inherit (crateName) pname version;
src = inputs.self;
src = self;
nativeBuildInputs = with final; [
pkg-config
@@ -94,7 +97,7 @@
createChain 200 startFile;
});
devShells = forEachSupportedSystem ({ system, pkgs }: {
devShells = forEachSupportedSystem ({ system, pkgs, lib }: {
default = pkgs.mkShell {
packages = with pkgs; [
rustc

View file

@@ -16,7 +16,7 @@ serde = { version = "1.0.162", default-features = false, features = ["derive"] }
serde_json = { version = "1.0.96", default-features = false }
sha2 = { version = "0.10.6", default-features = false }
thiserror = "1.0.40"
tokio = { version = "1.44.2", default-features = false, features = ["io-util"] }
tokio = { version = "1.28.0", default-features = false, features = ["io-util"] }
tracing = { version = "0.1.37", default-features = false }
unicode-bom = "2.0.2"

View file

@@ -58,8 +58,9 @@ http-body-util = "0.1"
hyper = { version = "1.0.0", features = ["full"] }
hyper-util = { version = "0.1", features = ["tokio", "server-auto", "http1"] }
xdg = { version = "2.5.2" }
color-eyre = { version = "0.6.3" }
[dependencies.tokio]
version = "1.44.2"
version = "1.28.0"
default-features = false
features = ["fs", "macros", "process", "rt", "rt-multi-thread", "sync"]

View file

@@ -94,6 +94,10 @@ async fn workflow_finish(
}
};
// maybe here send the request to Dnixd to Flush
// save uuid from response
// then wait on receiver until we get that same uuid back
if let Some(gha_cache) = &state.gha_cache {
tracing::info!("Waiting for GitHub action cache uploads to finish");
gha_cache.shutdown().await?;
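
The TODO comments above sketch the intended handshake: ask dnixd to flush, remember the UUID it returns, then wait on the receiver until that same UUID comes back before shutting the caches down. A minimal sketch of that wait loop, assuming a hypothetical flush-request step and the dnixd_flush_receiver channel that this diff adds to StateInner:

use tokio::sync::mpsc::UnboundedReceiver;
use uuid::Uuid;

/// Block until dnixd confirms the flush identified by `expected`.
/// Sketch only: how the flush request is sent to dnixd, and how it hands back
/// the UUID, is not part of this diff.
async fn wait_for_dnixd_flush(receiver: &mut UnboundedReceiver<Uuid>, expected: Uuid) {
    while let Some(flushed) = receiver.recv().await {
        if flushed == expected {
            tracing::info!("dnixd confirmed flush {flushed}");
            return;
        }
        tracing::debug!("ignoring flush confirmation for unrelated id {flushed}");
    }
    tracing::warn!("flush channel closed before {expected} was confirmed");
}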

View file

@@ -0,0 +1,52 @@
use serde::{Deserialize, Serialize};
const GITHUB_ACTOR_TYPE_USER: &str = "User";
const GITHUB_ACTOR_TYPE_ORGANIZATION: &str = "Organization";
#[derive(Serialize, Deserialize)]
pub struct WorkflowData {
event: WorkflowDataEvent,
}
#[derive(Serialize, Deserialize)]
pub struct WorkflowDataEvent {
repository: WorkflowDataEventRepo,
}
#[derive(Serialize, Deserialize)]
pub struct WorkflowDataEventRepo {
owner: WorkflowDataEventRepoOwner,
}
#[derive(Serialize, Deserialize)]
pub struct WorkflowDataEventRepoOwner {
login: String,
#[serde(rename = "type")]
kind: String,
}
pub(crate) fn get_actions_event_data() -> color_eyre::Result<WorkflowData> {
let github_context = std::env::var("GITHUB_CONTEXT")?;
let workflow_data: WorkflowData = serde_json::from_str::<WorkflowData>(&github_context)?;
Ok(workflow_data)
}
pub(crate) fn print_unauthenticated_error() {
let mut msg = "::error title=FlakeHub registration required.::Unable to authenticate to FlakeHub. Individuals must register at FlakeHub.com; Organizations must create an organization at FlakeHub.com.".to_string();
if let Ok(workflow_data) = get_actions_event_data() {
let owner = workflow_data.event.repository.owner;
if owner.kind == GITHUB_ACTOR_TYPE_USER {
msg = format!(
"::error title=FlakeHub registration required.::Please create an account for {} on FlakeHub.com to publish flakes.",
&owner.login
);
} else if owner.kind == GITHUB_ACTOR_TYPE_ORGANIZATION {
msg = format!(
"::error title=FlakeHub registration required.::Please create an organization for {} on FlakeHub.com to publish flakes.",
&owner.login
);
}
};
println!("{}", msg);
}
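
The structs above pull only the repository owner out of the Actions event payload; serde skips every other field in GITHUB_CONTEXT. A hedged sketch of a unit test that could sit at the bottom of this new module, with a minimal JSON document whose shape is inferred from the serde derives (the real payload carries far more fields):

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parses_minimal_github_context() {
        // Only the fields the structs declare; everything else is ignored.
        let json = r#"{
            "event": {
                "repository": {
                    "owner": { "login": "example-org", "type": "Organization" }
                }
            }
        }"#;
        let data: WorkflowData =
            serde_json::from_str(json).expect("minimal context should parse");
        assert_eq!(data.event.repository.owner.kind, GITHUB_ACTOR_TYPE_ORGANIZATION);
        assert_eq!(data.event.repository.owner.login, "example-org");
    }
}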

View file

@@ -18,6 +18,7 @@ mod env;
mod error;
mod flakehub;
mod gha;
mod github;
mod pbh;
mod telemetry;
mod util;
@@ -36,6 +37,7 @@ use clap::Parser;
use serde::{Deserialize, Serialize};
use tokio::fs::File;
use tokio::io::AsyncWriteExt;
use tokio::sync::mpsc::UnboundedReceiver;
use tokio::sync::{oneshot, Mutex, RwLock};
use tracing_subscriber::filter::EnvFilter;
use tracing_subscriber::layer::SubscriberExt;
@@ -226,6 +228,9 @@ struct StateInner {
/// The paths in the Nix store when Magic Nix Cache started, if store diffing is enabled.
original_paths: Option<Mutex<HashSet<PathBuf>>>,
/// The receiver side of the channel we use to get flush events from dnixd back to the workflow_shutdown handler
dnixd_flush_receiver: Option<UnboundedReceiver<uuid::Uuid>>,
}
#[derive(Debug, Clone)]
@@ -366,8 +371,11 @@ async fn main_cli() -> Result<()> {
Some(state)
}
Err(err) => {
tracing::error!("FlakeHub cache initialization failed: {}. Unable to authenticate to FlakeHub. Individuals must register at FlakeHub.com; Organizations must create an organization at FlakeHub.com.", err);
println!("::error title={{FlakeHub: Unauthenticated}}::{{Unable to authenticate to FlakeHub. Individuals must register at FlakeHub.com; Organizations must create an organization at FlakeHub.com.}}");
tracing::error!(
"FlakeHub: cache initialized failed: Unauthenticated: {}",
err
);
github::print_unauthenticated_error();
None
}
}
@@ -418,6 +426,15 @@ async fn main_cli() -> Result<()> {
let (shutdown_sender, shutdown_receiver) = oneshot::channel();
let dnixd_flush_channel = if dnixd_available == Dnixd::Available {
Some(tokio::sync::mpsc::unbounded_channel())
} else {
None
};
// Split the optional channel: the sender is handed to the dnixd post-build-hook subscriber, the receiver is stored in state.
let dnixd_flush_sender = dnixd_flush_channel.as_ref().map(|c| c.0.clone());
let dnixd_flush_receiver = dnixd_flush_channel.map(|c| c.1);
let original_paths = args.diff_store.then_some(Mutex::new(HashSet::new()));
let state = Arc::new(StateInner {
gha_cache,
@@ -429,11 +446,12 @@ async fn main_cli() -> Result<()> {
flakehub_state: RwLock::new(flakehub_state),
logfile: guard.logfile,
original_paths,
dnixd_flush_receiver,
});
if dnixd_available == Dnixd::Available {
tracing::info!("Subscribing to Determinate Nixd build events.");
crate::pbh::subscribe_uds_post_build_hook(dnixd_uds_socket_path, state.clone()).await?;
crate::pbh::subscribe_uds_post_build_hook(dnixd_uds_socket_path, state.clone(), dnixd_flush_sender).await?;
} else {
tracing::info!("Patching nix.conf to use a post-build-hook.");
crate::pbh::setup_legacy_post_build_hook(&args.listen, &mut nix_conf).await?;
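
Because the flush channel only exists when dnixd is available, the sender/receiver pair is split out of an Option: a cloned sender travels to the post-build-hook subscriber while the receiver moves into StateInner. A standalone sketch of that split, using illustrative names rather than the ones in the diff:

use tokio::sync::mpsc;
use uuid::Uuid;

fn main() {
    // Stand-in for the Dnixd::Available check in main_cli.
    let dnixd_available = true;

    // Create the channel only when dnixd can actually deliver flush events.
    let channel = dnixd_available.then(mpsc::unbounded_channel::<Uuid>);

    // The sender is cloned for the subscriber task; the receiver moves into state.
    let flush_sender = channel.as_ref().map(|(tx, _)| tx.clone());
    let flush_receiver = channel.map(|(_, rx)| rx);

    assert_eq!(flush_sender.is_some(), flush_receiver.is_some());
}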

View file

@@ -21,6 +21,7 @@ use crate::State;
pub async fn subscribe_uds_post_build_hook(
dnixd_uds_socket_path: PathBuf,
state: State,
dnixd_flush_sender: Option<tokio::sync::mpsc::UnboundedSender<uuid::Uuid>>,
) -> Result<()> {
tokio::spawn(async move {
let dnixd_uds_socket_path = &dnixd_uds_socket_path;
@@ -79,6 +80,9 @@ pub async fn subscribe_uds_post_build_hook(
tracing::debug!("built-paths subscription: ignoring non-data frame");
continue;
};
// TODO: check for flush event of flush type - send it to the sender
let Ok(event): core::result::Result<BuiltPathResponseEventV1, _> =
serde_json::from_slice(event_str)
else {
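
The TODO above calls for recognizing flush-type events on the dnixd socket and routing them to the new sender. A hedged sketch of that branch as a standalone helper; the FlushEvent shape and its id field are assumptions, since the real dnixd event format is not shown in this diff:

use serde::Deserialize;
use tokio::sync::mpsc::UnboundedSender;
use uuid::Uuid;

/// Hypothetical shape of a flush confirmation frame; the field name is an assumption.
#[derive(Deserialize)]
struct FlushEvent {
    id: String,
}

/// Try to treat a frame as a flush confirmation and hand its UUID to the waiter.
/// Returns true if the frame was consumed as a flush event.
fn forward_flush_event(frame: &[u8], sender: &Option<UnboundedSender<Uuid>>) -> bool {
    let Ok(event) = serde_json::from_slice::<FlushEvent>(frame) else {
        return false; // not a flush frame; fall through to built-path handling
    };
    let Ok(id) = Uuid::parse_str(&event.id) else {
        return false; // malformed id; ignore rather than abort the subscription
    };
    if let Some(sender) = sender {
        // An UnboundedSender never blocks; send only fails if the receiver was dropped.
        let _ = sender.send(id);
    }
    true
}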