Get rid of unwraps/expects
The remaining ones should be genuine "impossible" conditions. main() now renders the error value using anyhow.

commit 5f981d2f91
parent 75b1450fdf

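As a rough illustration of the pattern this commit applies (a minimal sketch; the function name and config path below are hypothetical, not taken from this repository): fallible calls propagate errors with ? plus with_context, missing values become errors via ok_or_else/anyhow!, and main returns anyhow::Result<()> so anyhow renders the error value and its context chain on exit instead of a panic backtrace.

use anyhow::{anyhow, Context, Result};
use std::fs;

// Hypothetical helper; only the anyhow usage mirrors the commit.
fn read_config(path: Option<&str>) -> Result<String> {
    // Instead of `.expect("--config is required")`:
    let path = path.ok_or_else(|| anyhow!("--config is required"))?;
    // Instead of `.expect("Reading the config file")`:
    fs::read_to_string(path).with_context(|| format!("Reading the config file '{}'", path))
}

fn main() -> Result<()> {
    // Returning Result from main lets anyhow display the error and its context chain.
    let config = read_config(Some("/etc/example.conf"))?;
    println!("read {} bytes of config", config.len());
    Ok(())
}
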
@@ -63,12 +63,15 @@ async fn workflow_finish(
         upload_paths(new_paths.clone(), &store_uri).await?;
     }

-    let sender = state.shutdown_sender.lock().await.take().unwrap();
-    sender.send(()).unwrap();
-
-    // Wait for the Attic push workers to finish.
-    if let Some(attic_state) = state.flakehub_state.write().await.take() {
-        attic_state.push_session.wait().await.unwrap();
+    if let Some(sender) = state.shutdown_sender.lock().await.take() {
+        sender
+            .send(())
+            .expect("Cannot send shutdown server message");
+
+        // Wait for the Attic push workers to finish.
+        if let Some(attic_state) = state.flakehub_state.write().await.take() {
+            attic_state.push_session.wait().await?;
+        }
     }

     let reply = WorkflowFinishResponse {
@@ -93,7 +96,7 @@ fn make_store_uri(self_endpoint: &SocketAddr) -> String {
         .authority(self_endpoint.to_string())
         .path_and_query("/?compression=zstd&parallel-compression=true")
         .build()
-        .unwrap()
+        .expect("Cannot construct URL to self")
         .to_string()
 }

@@ -48,6 +48,9 @@ pub enum Error {

     #[error("Bad URL")]
     BadUrl(reqwest::Url),
+
+    #[error("Configuration error: {0}")]
+    Config(String),
 }

 impl IntoResponse for Error {
@@ -55,6 +55,20 @@ pub async fn init_cache(
         .ok_or_else(|| Error::BadUrl(flakehub_cache_server.to_owned()))?
         .to_string();

+    let login = netrc_entry.login.as_ref().ok_or_else(|| {
+        Error::Config(format!(
+            "netrc file does not contain a login for '{}'",
+            flakehub_cache_server
+        ))
+    })?;
+
+    let password = netrc_entry.password.as_ref().ok_or_else(|| {
+        Error::Config(format!(
+            "netrc file does not contain a password for '{}'",
+            flakehub_cache_server
+        ))
+    })?;
+
     // Append an entry for the FlakeHub cache server to netrc.
     if !netrc
         .machines
@@ -70,8 +84,7 @@ pub async fn init_cache(
             .write_all(
                 format!(
                     "\nmachine {} password {}\n\n",
-                    flakehub_cache_server_hostname,
-                    netrc_entry.password.as_ref().unwrap(),
+                    flakehub_cache_server_hostname, password,
                 )
                 .as_bytes(),
             )
@@ -80,20 +93,18 @@ pub async fn init_cache(

     // Get the cache UUID for this project.
     let cache_name = {
-        let github_repo = env::var("GITHUB_REPOSITORY")
-            .expect("GITHUB_REPOSITORY environment variable is not set");
+        let github_repo = env::var("GITHUB_REPOSITORY").map_err(|_| {
+            Error::Config("GITHUB_REPOSITORY environment variable is not set".to_owned())
+        })?;

         let url = flakehub_api_server
             .join(&format!("project/{}", github_repo))
-            .unwrap();
+            .map_err(|_| Error::Config(format!("bad URL '{}'", flakehub_api_server)))?;

         let response = reqwest::Client::new()
             .get(url.to_owned())
             .header("User-Agent", USER_AGENT)
-            .basic_auth(
-                netrc_entry.login.as_ref().unwrap(),
-                netrc_entry.password.as_ref(),
-            )
+            .basic_auth(login, Some(password))
             .send()
             .await?;

@@ -28,6 +28,7 @@ use std::path::{Path, PathBuf};
 use std::sync::Arc;

 use ::attic::nix_store::NixStore;
+use anyhow::{anyhow, Context, Result};
 use axum::{extract::Extension, routing::get, Router};
 use clap::Parser;
 use tempfile::NamedTempFile;
@@ -139,34 +140,35 @@ struct StateInner {
     flakehub_state: RwLock<Option<flakehub::State>>,
 }

-async fn main_cli() {
+async fn main_cli() -> Result<()> {
     init_logging();

     let args = Args::parse();

-    create_dir_all(Path::new(&args.nix_conf).parent().unwrap())
-        .expect("Creating parent directories of nix.conf");
+    if let Some(parent) = Path::new(&args.nix_conf).parent() {
+        create_dir_all(parent).with_context(|| "Creating parent directories of nix.conf")?;
+    }

     let mut nix_conf = OpenOptions::new()
         .create(true)
         .append(true)
         .open(args.nix_conf)
-        .expect("Opening nix.conf");
+        .with_context(|| "Creating nix.conf")?;

-    let store = Arc::new(NixStore::connect().expect("Connecting to the Nix store"));
+    let store = Arc::new(NixStore::connect()?);

     let flakehub_state = if args.use_flakehub {
         let flakehub_cache_server = args
             .flakehub_cache_server
-            .expect("--flakehub-cache-server is required");
+            .ok_or_else(|| anyhow!("--flakehub-cache-server is required"))?;
         let flakehub_api_server_netrc = args
             .flakehub_api_server_netrc
-            .expect("--flakehub-api-server-netrc is required");
+            .ok_or_else(|| anyhow!("--flakehub-api-server-netrc is required"))?;

         match flakehub::init_cache(
             &args
                 .flakehub_api_server
-                .expect("--flakehub-api-server is required"),
+                .ok_or_else(|| anyhow!("--flakehub-api-server is required"))?,
             &flakehub_api_server_netrc,
             &flakehub_cache_server,
             store.clone(),
@@ -183,7 +185,7 @@ async fn main_cli() {
                 )
                 .as_bytes(),
             )
-            .expect("Writing to nix.conf");
+            .with_context(|| "Writing to nix.conf")?;

             tracing::info!("FlakeHub cache is enabled.");
             Some(state)
@@ -201,16 +203,27 @@ async fn main_cli() {
     let api = if args.use_gha_cache {
         let credentials = if let Some(credentials_file) = &args.credentials_file {
             tracing::info!("Loading credentials from {:?}", credentials_file);
-            let bytes = fs::read(credentials_file).expect("Failed to read credentials file");
+            let bytes = fs::read(credentials_file).with_context(|| {
+                format!(
+                    "Failed to read credentials file '{}'",
+                    credentials_file.display()
+                )
+            })?;

-            serde_json::from_slice(&bytes).expect("Failed to deserialize credentials file")
+            serde_json::from_slice(&bytes).with_context(|| {
+                format!(
+                    "Failed to deserialize credentials file '{}'",
+                    credentials_file.display()
+                )
+            })?
         } else {
             tracing::info!("Loading credentials from environment");
             Credentials::load_from_env()
-                .expect("Failed to load credentials from environment (see README.md)")
+                .with_context(|| "Failed to load credentials from environment (see README.md)")?
         };

-        let mut api = Api::new(credentials).expect("Failed to initialize GitHub Actions Cache API");
+        let mut api = Api::new(credentials)
+            .with_context(|| "Failed to initialize GitHub Actions Cache API")?;

         if let Some(cache_version) = &args.cache_version {
             api.mutate_version(cache_version.as_bytes());
@@ -218,7 +231,7 @@ async fn main_cli() {

         nix_conf
             .write_all(format!("extra-substituters = http://{}?trusted=1&compression=zstd&parallel-compression=true&priority=1\n", args.listen).as_bytes())
-            .expect("Writing to nix.conf");
+            .with_context(|| "Writing to nix.conf")?;

         tracing::info!("Native GitHub Action cache is enabled.");
         Some(api)
@@ -231,24 +244,27 @@ async fn main_cli() {
      * ignores errors, to avoid the Nix build from failing. */
     let post_build_hook_script = {
         let mut file = NamedTempFile::with_prefix("magic-nix-cache-build-hook-")
-            .expect("Creating a temporary file");
+            .with_context(|| "Creating a temporary file for the post-build hook")?;
         file.write_all(
             format!(
                 // NOTE(cole-h): We want to exit 0 even if the hook failed, otherwise it'll fail the
                 // build itself
                 "#! /bin/sh\nRUST_BACKTRACE=full {} --server {}\nexit 0\n",
                 std::env::current_exe()
-                    .expect("Getting the path of magic-nix-cache")
+                    .with_context(|| "Getting the path of magic-nix-cache")?
                     .display(),
                 args.listen
             )
             .as_bytes(),
         )
-        .expect("Writing the post-build hook");
-        file.keep().unwrap().1
+        .with_context(|| "Writing the post-build hook")?;
+        file.keep()
+            .with_context(|| "Keeping the post-build hook")?
+            .1
     };

-    fs::set_permissions(&post_build_hook_script, fs::Permissions::from_mode(0o755)).unwrap();
+    fs::set_permissions(&post_build_hook_script, fs::Permissions::from_mode(0o755))
+        .with_context(|| "Setting permissions on the post-build hook")?;

     /* Update nix.conf. */
     nix_conf
|
||||||
)
|
)
|
||||||
.as_bytes(),
|
.as_bytes(),
|
||||||
)
|
)
|
||||||
.expect("Writing to nix.conf");
|
.with_context(|| "Writing to nix.conf")?;
|
||||||
|
|
||||||
drop(nix_conf);
|
drop(nix_conf);
|
||||||
|
|
||||||
|
@@ -309,15 +325,15 @@ async fn main_cli() {
         match response {
             Ok(response) => {
                 if !response.status().is_success() {
-                    panic!(
+                    Err(anyhow!(
                         "Startup notification returned an error: {}\n{}",
                         response.status(),
                         response.text().await.unwrap_or_else(|_| "".to_owned())
-                    );
+                    ))?;
                 }
             }
             Err(err) => {
-                panic!("Startup notification failed: {}", err);
+                Err(anyhow!("Startup notification failed: {}", err))?;
             }
         }
     }
@@ -334,10 +350,12 @@ async fn main_cli() {
         state.metrics.send(diagnostic_endpoint).await;
     }

-    ret.unwrap()
+    ret?;
+
+    Ok(())
 }

-async fn post_build_hook(out_paths: &str) {
+async fn post_build_hook(out_paths: &str) -> Result<()> {
     #[derive(Parser, Debug)]
     struct Args {
         /// `magic-nix-cache` daemon to connect to.
@@ -357,43 +375,33 @@ async fn post_build_hook(out_paths: &str) {
     let response = reqwest::Client::new()
         .post(format!("http://{}/api/enqueue-paths", &args.server))
         .header(reqwest::header::CONTENT_TYPE, "application/json")
-        .body(serde_json::to_string(&request).unwrap())
+        .body(
+            serde_json::to_string(&request)
+                .with_context(|| "Decoding the response from the magic-nix-cache server")?,
+        )
         .send()
         .await;

-    let mut err_message = None;
     match response {
-        Ok(response) if !response.status().is_success() => {
-            err_message = Some(format!(
-                "magic-nix-cache server failed to enqueue the push request: {}\n{}",
-                response.status(),
-                response.text().await.unwrap_or_else(|_| "".to_owned()),
-            ));
-        }
-        Ok(response) => {
-            let enqueue_paths_response = response.json::<api::EnqueuePathsResponse>().await;
-            if let Err(err) = enqueue_paths_response {
-                err_message = Some(format!(
-                    "magic-nix-cache-server didn't return a valid response: {}",
-                    err
-                ))
-            }
-        }
+        Ok(response) if !response.status().is_success() => Err(anyhow!(
+            "magic-nix-cache server failed to enqueue the push request: {}\n{}",
+            response.status(),
+            response.text().await.unwrap_or_else(|_| "".to_owned()),
+        ))?,
+        Ok(response) => response
+            .json::<api::EnqueuePathsResponse>()
+            .await
+            .with_context(|| "magic-nix-cache-server didn't return a valid response")?,
         Err(err) => {
-            err_message = Some(format!(
-                "magic-nix-cache server failed to send the enqueue request: {}",
-                err
-            ));
+            Err(err).with_context(|| "magic-nix-cache server failed to send the enqueue request")?
         }
-    }
+    };

-    if let Some(err_message) = err_message {
-        eprintln!("{err_message}");
-    }
+    Ok(())
 }

 #[tokio::main]
-async fn main() {
+async fn main() -> Result<()> {
     match std::env::var("OUT_PATHS") {
         Ok(out_paths) => post_build_hook(&out_paths).await,
         Err(_) => main_cli().await,
@@ -53,16 +53,20 @@ pub async fn upload_paths(mut paths: Vec<PathBuf>, store_uri: &str) -> Result<()
         .output()
         .await?
         .stdout;
-    let env_path = String::from_utf8(env_path).expect("PATH contains invalid UTF-8");
+    let env_path = String::from_utf8(env_path)
+        .map_err(|_| Error::Config("PATH contains invalid UTF-8".to_owned()))?;

     while !paths.is_empty() {
         let mut batch = Vec::new();
         let mut total_len = 0;

-        while !paths.is_empty() && total_len < 1024 * 1024 {
-            let p = paths.pop().unwrap();
-            total_len += p.as_os_str().len() + 1;
-            batch.push(p);
+        while total_len < 1024 * 1024 {
+            if let Some(p) = paths.pop() {
+                total_len += p.as_os_str().len() + 1;
+                batch.push(p);
+            } else {
+                break;
+            }
         }

         tracing::debug!("{} paths in this batch", batch.len());
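
The batching change in the last hunk reads more easily in isolation; this is a hedged sketch of the same logic as a standalone helper (the function name is hypothetical, not part of the crate): pop paths until roughly 1 MiB of path bytes has accumulated, presumably to keep each upload command's argument list bounded.

use std::path::PathBuf;

// Hypothetical helper mirroring the loop above: drain paths into one batch,
// stopping once ~1 MiB of accumulated path bytes is reached or the input is empty.
fn next_batch(paths: &mut Vec<PathBuf>) -> Vec<PathBuf> {
    let mut batch = Vec::new();
    let mut total_len = 0;
    while total_len < 1024 * 1024 {
        if let Some(p) = paths.pop() {
            total_len += p.as_os_str().len() + 1;
            batch.push(p);
        } else {
            break;
        }
    }
    batch
}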