Cleanup
parent b41211dc24
commit 1f46e11aa7

@@ -49,7 +49,6 @@ async fn workflow_finish(
     Extension(state): Extension<State>,
 ) -> Result<Json<WorkflowFinishResponse>> {
     tracing::info!("Workflow finished");
-
     let original_paths = state.original_paths.lock().await;
     let final_paths = get_store_paths(&state.store).await?;
     let new_paths = final_paths
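
The rest of `workflow_finish` (outside this hunk) presumably compares the store paths recorded at startup with the paths present at the end of the run. That comparison is cut off here, but the underlying idea is a plain set difference; a minimal sketch over strings (the real code works with Nix store path values, and the names below are illustrative):

```rust
use std::collections::HashSet;

/// Paths present at the end of the workflow that were not present at startup.
fn new_store_paths(
    original_paths: &HashSet<String>,
    final_paths: &HashSet<String>,
) -> HashSet<String> {
    final_paths.difference(original_paths).cloned().collect()
}
```
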
@@ -66,7 +65,7 @@ async fn workflow_finish(
     if let Some(sender) = state.shutdown_sender.lock().await.take() {
         sender
             .send(())
-            .expect("Cannot send shutdown server message");
+            .map_err(|_| Error::Internal("Sending shutdown server message".to_owned()))?;

         // Wait for the Attic push workers to finish.
         if let Some(attic_state) = state.flakehub_state.write().await.take() {
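
Replacing `.expect(...)` with `.map_err(...)?` turns a failure to signal shutdown into an `Error::Internal` that the handler can return, instead of panicking inside the server. A minimal sketch of the pattern with a tokio `oneshot` channel and a stand-in error type (not the crate's actual `Error`):

```rust
use tokio::sync::oneshot;

// Stand-in for the crate's error type; only the shape matters here.
#[derive(Debug)]
enum Error {
    Internal(String),
}

fn request_shutdown(sender: Option<oneshot::Sender<()>>) -> Result<(), Error> {
    if let Some(sender) = sender {
        // `send` only fails if the receiving side has already been dropped;
        // map that into a recoverable error rather than panicking.
        sender
            .send(())
            .map_err(|_| Error::Internal("Sending shutdown server message".to_owned()))?;
    }
    Ok(())
}
```
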
@@ -51,6 +51,9 @@ pub enum Error {

     #[error("Configuration error: {0}")]
     Config(String),
+
+    #[error("Internal error: {0}")]
+    Internal(String),
 }

 impl IntoResponse for Error {
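
The new `Internal(String)` variant is what the `map_err` above produces. Because the enum derives `thiserror::Error` and already implements axum's `IntoResponse`, the new variant is reported back to the client like any other error. A rough sketch of how such an enum and impl fit together (simplified, with only two variants; not the crate's exact implementation):

```rust
use axum::http::StatusCode;
use axum::response::{IntoResponse, Response};

#[derive(Debug, thiserror::Error)]
pub enum Error {
    #[error("Configuration error: {0}")]
    Config(String),

    #[error("Internal error: {0}")]
    Internal(String),
}

impl IntoResponse for Error {
    fn into_response(self) -> Response {
        // Render every variant through its `thiserror` display message.
        (StatusCode::INTERNAL_SERVER_ERROR, self.to_string()).into_response()
    }
}
```
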
@@ -39,7 +39,7 @@ pub async fn init_cache(
         netrc_rs::Netrc::parse(netrc_contents, false).map_err(Error::Netrc)?
     };

-    let netrc_entry = {
+    let flakehub_netrc_entry = {
         netrc
             .machines
             .iter()

@@ -55,17 +55,17 @@ pub async fn init_cache(
         .ok_or_else(|| Error::BadUrl(flakehub_cache_server.to_owned()))?
         .to_string();

-    let login = netrc_entry.login.as_ref().ok_or_else(|| {
+    let flakehub_login = flakehub_netrc_entry.login.as_ref().ok_or_else(|| {
         Error::Config(format!(
             "netrc file does not contain a login for '{}'",
-            flakehub_cache_server
+            flakehub_api_server
         ))
     })?;

-    let password = netrc_entry.password.as_ref().ok_or_else(|| {
+    let flakehub_password = flakehub_netrc_entry.password.as_ref().ok_or_else(|| {
         Error::Config(format!(
             "netrc file does not contain a password for '{}'",
-            flakehub_cache_server
+            flakehub_api_server
         ))
     })?;

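
Besides the renames, the error messages now name `flakehub_api_server`, the host whose netrc entry is actually being read, instead of the cache server. A self-contained sketch of the lookup, reusing the `netrc_rs` calls that appear above (this assumes `netrc_rs`'s machine entries expose `name`, `login`, and `password` as `Option`s; the error type and function are illustrative):

```rust
use netrc_rs::Netrc;

// Illustrative error type standing in for the crate's `Error::Config`.
#[derive(Debug)]
enum Error {
    Config(String),
}

fn flakehub_credentials(
    netrc_contents: &str,
    flakehub_api_server: &str,
) -> Result<(String, String), Error> {
    // Same parse call as in the hunk above.
    let netrc = Netrc::parse(netrc_contents, false)
        .map_err(|_| Error::Config("could not parse the netrc file".to_owned()))?;

    // Find the machine entry for the FlakeHub API server.
    let entry = netrc
        .machines
        .iter()
        .find(|machine| machine.name.as_deref() == Some(flakehub_api_server))
        .ok_or_else(|| Error::Config(format!("no netrc entry for '{}'", flakehub_api_server)))?;

    let login = entry.login.clone().ok_or_else(|| {
        Error::Config(format!(
            "netrc file does not contain a login for '{}'",
            flakehub_api_server
        ))
    })?;
    let password = entry.password.clone().ok_or_else(|| {
        Error::Config(format!(
            "netrc file does not contain a password for '{}'",
            flakehub_api_server
        ))
    })?;

    Ok((login, password))
}
```
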
@@ -83,8 +83,8 @@ pub async fn init_cache(
     netrc_file
         .write_all(
             format!(
-                "\nmachine {} password {}\n\n",
-                flakehub_cache_server_hostname, password,
+                "\nmachine {} login {} password {}\n\n",
+                flakehub_cache_server_hostname, flakehub_login, flakehub_password,
             )
             .as_bytes(),
         )
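
The old format string wrote `machine <host> password <secret>` and never included a login, so the generated entry was missing it; the fix writes all three fields. A small synchronous sketch of appending such an entry (the surrounding code uses an async file handle; names here are illustrative):

```rust
use std::fs::OpenOptions;
use std::io::Write;

fn append_netrc_entry(
    netrc_path: &str,
    hostname: &str,
    login: &str,
    password: &str,
) -> std::io::Result<()> {
    let mut netrc_file = OpenOptions::new().append(true).open(netrc_path)?;
    // Same line shape as the fixed format string above.
    netrc_file.write_all(
        format!("\nmachine {} login {} password {}\n\n", hostname, login, password).as_bytes(),
    )
}
```
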
@@ -104,7 +104,7 @@ pub async fn init_cache(
     let response = reqwest::Client::new()
         .get(url.to_owned())
         .header("User-Agent", USER_AGENT)
-        .basic_auth(login, Some(password))
+        .basic_auth(flakehub_login, Some(flakehub_password))
         .send()
         .await?;

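
The verification request now authenticates with the renamed FlakeHub credentials. A sketch of the same reqwest call shape against a hypothetical URL (the user-agent string and error handling below are placeholders), including the kind of fallback body used for error reporting elsewhere in this commit:

```rust
async fn check_flakehub_auth(url: &str, login: &str, password: &str) -> Result<(), String> {
    let response = reqwest::Client::new()
        .get(url)
        .header("User-Agent", "magic-nix-cache-sketch")
        .basic_auth(login, Some(password))
        .send()
        .await
        .map_err(|e| e.to_string())?;

    let status = response.status();
    if !status.is_success() {
        // Read the body for diagnostics, but don't fail if it can't be read.
        let body = response
            .text()
            .await
            .unwrap_or_else(|_| "<no response text>".to_owned());
        return Err(format!("unexpected status {}: {}", status, body));
    }

    Ok(())
}
```
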
@@ -135,7 +135,7 @@ pub async fn init_cache(

     let api = ApiClient::from_server_config(ServerConfig {
         endpoint: flakehub_cache_server.to_string(),
-        token: netrc_entry.password.as_ref().cloned(),
+        token: flakehub_netrc_entry.password.as_ref().cloned(),
     })?;

     let cache_config = api.get_cache_config(&cache).await?;
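
The Attic `ApiClient` is handed the same netrc password as its token, now read from the renamed `flakehub_netrc_entry`. The `.as_ref().cloned()` chain clones the inner `String` out of the borrowed `Option<String>` without moving it, so the entry stays usable afterwards. A tiny sketch with a stand-in config struct (not attic's real `ServerConfig`):

```rust
// Stand-in for attic's ServerConfig; the fields mirror the two used in the hunk.
struct ServerConfig {
    endpoint: String,
    token: Option<String>,
}

fn build_server_config(endpoint: &str, netrc_password: &Option<String>) -> ServerConfig {
    ServerConfig {
        endpoint: endpoint.to_string(),
        // Clone the password out of the borrowed Option rather than moving it.
        token: netrc_password.as_ref().cloned(),
    }
}
```
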
@@ -328,12 +328,15 @@ async fn main_cli() -> Result<()> {
                     Err(anyhow!(
                         "Startup notification returned an error: {}\n{}",
                         response.status(),
-                        response.text().await.unwrap_or_else(|_| "<no response text>".to_owned())
+                        response
+                            .text()
+                            .await
+                            .unwrap_or_else(|_| "<no response text>".to_owned())
                     ))?;
                 }
             }
-            Err(err) => {
-                Err(anyhow!("Startup notification failed: {}", err))?;
+            err @ Err(_) => {
+                err.with_context(|| "Startup notification failed")?;
             }
         }
     }
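
Instead of formatting the reqwest error into a fresh `anyhow!` message, the arm now binds the whole `Result` with `err @ Err(_)` and attaches context via anyhow's `Context` trait, which keeps the original error in the error chain. A sketch contrasting the two styles (the URL and function name are hypothetical):

```rust
use anyhow::{Context, Result};

async fn send_startup_notification(url: &str) -> Result<()> {
    let response = reqwest::Client::new().post(url).send().await;

    match response {
        Ok(response) => {
            if !response.status().is_success() {
                // ... report the failed status, as in the hunk above ...
            }
        }
        // Old style (loses the reqwest error as a typed source):
        //     Err(err) => { Err(anyhow!("Startup notification failed: {}", err))?; }
        // New style: keep the source error and add a context message.
        err @ Err(_) => {
            err.with_context(|| "Startup notification failed")?;
        }
    }

    Ok(())
}
```
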
@@ -386,7 +389,10 @@ async fn post_build_hook(out_paths: &str) -> Result<()> {
         Ok(response) if !response.status().is_success() => Err(anyhow!(
             "magic-nix-cache server failed to enqueue the push request: {}\n{}",
             response.status(),
-            response.text().await.unwrap_or_else(|_| "<no response text>".to_owned()),
+            response
+                .text()
+                .await
+                .unwrap_or_else(|_| "<no response text>".to_owned()),
         ))?,
         Ok(response) => response
             .json::<api::EnqueuePathsResponse>()