diff --git a/src/cli.rs b/src/cli.rs
index 7a1bb45..a73c873 100644
--- a/src/cli.rs
+++ b/src/cli.rs
@@ -168,7 +168,7 @@ pub fn resolve_bite_config(
     } else {
         "polkadot".to_string()
     };
-    
+
     let relaychain = if relay_runtime.is_some() || rc_sync_url.is_some() || relay_bite_at.is_some()
     {
         // CLI args provided, use them
diff --git a/src/config.rs b/src/config.rs
index d43534f..b0e0208 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -17,9 +17,6 @@ pub fn get_state_pruning_config() -> String {
     env::var("ZOMBIE_BITE_STATE_PRUNING").unwrap_or_else(|_| STATE_PRUNING.to_string())
 }
 
-pub const AH_POLKADOT_RCP: &str = "https://asset-hub-polkadot-rpc.n.dwellir.com";
-pub const AH_KUSAMA_RCP: &str = "https://asset-hub-kusama-rpc.n.dwellir.com";
-
 #[derive(Debug, PartialEq, Clone, Copy)]
 pub enum Step {
     /// Initial step
diff --git a/src/doppelganger.rs b/src/doppelganger.rs
index 39d4c37..8a42a7c 100644
--- a/src/doppelganger.rs
+++ b/src/doppelganger.rs
@@ -1,7 +1,7 @@
 #![allow(dead_code)]
 // TODO: don't allow dead_code
 
-use anyhow::anyhow;
+use anyhow::{anyhow, Context as context};
 use futures::future::try_join_all;
 use futures::FutureExt;
 use serde_json::json;
@@ -72,18 +72,15 @@ pub async fn doppelganger_inner(
     let provider = NativeProvider::new(filesystem.clone());
 
     // ensure the base path exist
-    fs::create_dir_all(&global_base_dir).await.unwrap();
+    fs::create_dir_all(&global_base_dir).await?;
 
     // add `/bite` to global base
-    let fixed_base_dir = global_base_dir.canonicalize().unwrap().join("bite");
+    let fixed_base_dir = global_base_dir.canonicalize()?.join("bite");
     let base_dir_str = fixed_base_dir.to_string_lossy();
 
     let ns = provider
         .create_namespace_with_base_dir(fixed_base_dir.as_path())
-        .await
-        .unwrap();
-
-    let _relaychain_rpc_random_port = get_random_port().await;
+        .await?;
 
     // Parachain sync
     let mut syncs = vec![];
@@ -95,13 +92,11 @@ pub async fn doppelganger_inner(
 
         let maybe_target_header_path = if let Some(at_block) = para.at_block() {
             let para_rpc = para
                 .rpc_endpoint()
-                .expect("rpc for parachain should be set. qed");
+                .context("rpc for parachain should be set. qed")?;
             let header = get_header_from_block(at_block, para_rpc).await?;
             let target_header_path = format!("{base_dir_str}/para-header.json");
-            fs::write(&target_header_path, serde_json::to_string_pretty(&header)?)
-                .await
-                .expect("create target head json should works");
+            fs::write(&target_header_path, serde_json::to_string_pretty(&header)?).await?;
             Some(target_header_path)
         } else {
             None
@@ -123,7 +118,9 @@ pub async fn doppelganger_inner(
         );
     }
 
-    let res = try_join_all(syncs).await.unwrap();
+    let res = try_join_all(syncs)
+        .await
+        .map_err(|e| anyhow::anyhow!(format!("Failed to sync: {e:?}")))?;
 
     // loop over paras
     let mut para_artifacts = vec![];
@@ -132,10 +129,12 @@ pub async fn doppelganger_inner(
     for (para_index, (_sync_node, sync_db_path, sync_chain, sync_head_path)) in
         res.into_iter().enumerate()
     {
-        let sync_chain_name = if sync_chain.contains('/') {
-            let parts: Vec<&str> = sync_chain.split('/').collect();
-            let name_parts: Vec<&str> = parts.last().unwrap().split('.').collect();
-            name_parts.first().unwrap().to_string()
+        let sync_chain_name = if let Some(last_part) = sync_chain.rsplit('/').next() {
+            if let Some(first_name) = last_part.split('.').next() {
+                first_name.to_string()
+            } else {
+                return Err(anyhow::anyhow!("Invalid sync_chain: missing name"));
+            }
         } else {
             // is not a file
             sync_chain.clone()
@@ -149,30 +148,36 @@ pub async fn doppelganger_inner(
             &sync_chain,
         )
        .await
-        .unwrap();
+        .map_err(|e| anyhow!("Failed to generate chain spec {e}"))?;
 
         // generate the data.tgz to use as snapshot
         let snap_path = format!("{}/{}-snap.tgz", &base_dir_str, &sync_chain_name);
         trace!("snap_path: {snap_path}");
-        generate_snap(&sync_db_path, &snap_path).await.unwrap();
-
-        let para_head_str = read_to_string(&sync_head_path)
-            .unwrap_or_else(|_| panic!("read para_head ({sync_head_path}) file should works."));
-        let para_head_hex = if &para_head_str[..2] == "0x" {
-            &para_head_str[2..]
+        generate_snap(&sync_db_path, &snap_path).await?;
+        let para_head_str = read_to_string(&sync_head_path)?;
+        let para_head_hex = if let Some(stripped) = para_head_str.strip_prefix("0x") {
+            stripped
         } else {
             &para_head_str
         };
-        let para_head = array_bytes::bytes2hex(
-            "0x",
-            HeadData(hex::decode(para_head_hex).expect("para_head should be a valid hex. qed"))
-                .encode(),
-        );
+        let para_head = match hex::decode(para_head_hex) {
+            Ok(decoded) => {
+                let encoded = HeadData(decoded).encode();
+                array_bytes::bytes2hex("0x", encoded)
+            }
+            Err(e) => {
+                return Err(anyhow::anyhow!(
+                    "para_head should be a valid hex. qed '{}': {}",
+                    para_head_hex,
+                    e
+                ));
+            }
+        };
 
         let para = paras_to
             .get(para_index)
-            .expect("para_index should be valid. qed");
+            .context("para_index should be valid. qed")?;
         para_heads_env.push((
             format!("ZOMBIE_{}", &para_head_key(para.id())[2..]),
             para_head[2..].to_string(),
@@ -201,9 +206,7 @@ pub async fn doppelganger_inner(
 
         let header = get_header_from_block(at_block, &relay_chain.rpc_endpoint()).await?;
         let target_header_path = format!("{base_dir_str}/rc-header.json");
-        fs::write(&target_header_path, serde_json::to_string_pretty(&header)?)
-            .await
-            .expect("create target head json should works");
+        fs::write(&target_header_path, serde_json::to_string_pretty(&header)?).await?;
         Some(target_header_path)
     } else {
         None
@@ -220,10 +223,10 @@ pub async fn doppelganger_inner(
         database,
     )
     .await
-    .unwrap();
+    .map_err(|e| anyhow!("Failed to sync relay: {e:?}"))?;
 
     // stop relay node
-    sync_node.destroy().await.unwrap();
+    sync_node.destroy().await?;
 
     // get the chain-spec (prod) and clean the bootnodes
     // relaychain
@@ -236,7 +239,7 @@ pub async fn doppelganger_inner(
         &sync_chain,
     )
     .await
-    .unwrap();
+    .map_err(|e| anyhow::anyhow!("Failed to generate spec: {e}"))?;
 
     // remove `parachains` db
     let sync_chain_in_path = if sync_chain == "kusama" {
@@ -252,13 +255,11 @@ pub async fn doppelganger_inner(
     };
 
     debug!("Deleting `parachains` db at {parachains_path}");
-    tokio::fs::remove_dir_all(parachains_path)
-        .await
-        .expect("remove parachains db should work");
+    tokio::fs::remove_dir_all(&parachains_path).await?;
 
     // generate the data.tgz to use as snapshot
     let r_snap_path = format!("{}/{}-snap.tgz", &base_dir_str, &sync_chain);
-    generate_snap(&sync_db_path, &r_snap_path).await.unwrap();
+    generate_snap(&sync_db_path, &r_snap_path).await?;
 
     let relay_artifacts = ChainArtifact {
         cmd: context_relay.doppelganger_cmd(),
@@ -276,29 +277,26 @@ pub async fn doppelganger_inner(
         database,
     )
     .await
-    .map_err(|e| anyhow!(e.to_string()))?;
+    .map_err(|e| anyhow!("Failed to generate config: {e}"))?;
+
     // write config in 'bite'
     let config_toml_path = format!("{}/bite/config.toml", global_base_dir.to_string_lossy());
     let toml_config = config.dump_to_toml()?;
-    fs::write(config_toml_path, &toml_config)
-        .await
-        .expect("create config.toml should works");
+    fs::write(&config_toml_path, &toml_config).await?;
 
     // create port and ready files
     let rc_start_block = fs::read_to_string(format!("{base_dir_str}/rc_info.txt"))
-        .await
-        .unwrap()
+        .await?
         .parse::<u32>()
-        .expect("read bite rc block should works");
+        .context("read bite rc block should works")?;
 
     // Collect start blocks for all parachains
     let mut para_start_blocks = serde_json::Map::new();
     for para in &paras_to {
         let para_start_block = fs::read_to_string(format!("{base_dir_str}/para-{}.txt", para.id()))
-            .await
-            .unwrap()
+            .await?
             .parse::<u32>()
-            .unwrap_or_else(|_| panic!("read bite para-{} block should works", para.id()));
+            .map_err(|e| anyhow!("Failed to parse para-{} start block: {e}", para.id()))?;
         para_start_blocks.insert(
             format!("para_{}_start_block", para.id()),
             serde_json::Value::Number(para_start_block.into()),
@@ -320,36 +318,42 @@ pub async fn doppelganger_inner(
         .nodes()
         .into_iter()
         .find(|node| node.name() == "alice")
-        .expect("'alice' should exist");
+        .context("'alice' should exist")?;
 
     // Collect ports for all parachains
     let mut collator_ports = serde_json::Map::new();
     for para_config in config.parachains() {
         if let Some(collator) = para_config.collators().first() {
+            let port = collator.rpc_port().context(format!(
+                "Collator for para {} does not have an RPC port",
+                para_config.id()
+            ))?;
             collator_ports.insert(
                 format!("para_{}_collator_port", para_config.id()),
-                serde_json::Value::Number(collator.rpc_port().unwrap().into()),
+                serde_json::Value::Number(port.into()),
             );
         }
     }
 
     // ports
+    let alice_port = alice_config.rpc_port().context("Alice should have port")?;
     collator_ports.insert(
         "alice_port".to_string(),
-        serde_json::Value::Number(alice_config.rpc_port().unwrap().into()),
+        serde_json::Value::Number(alice_port.into()),
     );
 
     let ports_content = serde_json::Value::Object(collator_ports);
-    let _ = fs::write(
+    fs::write(
         format!("{}/{PORTS_FILE}", global_base_dir.to_string_lossy()),
         ports_content.to_string(),
     )
-    .await;
-    let _ = fs::write(
+    .await?;
+
+    fs::write(
         format!("{}/{READY_FILE}", global_base_dir.to_string_lossy()),
         ready_content.to_string(),
     )
-    .await;
+    .await?;
 
     clean_up_dir_for_step(global_base_dir, Step::Bite, &relay_chain, &paras_to).await?;
@@ -369,7 +373,7 @@ pub async fn generate_artifacts(
 
     // Parse config to get parachain information
     let network_config = zombienet_configuration::NetworkConfig::load_from_toml(&from_config_path)
-        .expect("should be able to load config");
+        .context("should be able to load config")?;
 
     // generate snapshot for alice (rc)
     let alice_data = format!("{global_base_dir_str}/{}/alice/data", step.dir());
@@ -391,9 +395,8 @@ pub async fn generate_artifacts(
     let rc_spec_file = format!("{}-spec.json", rc.as_chain_string());
     let rc_spec_from = format!("{global_base_dir_str}/{}/{rc_spec_file}", step.dir_from());
     let rc_spec_to = format!("{global_base_dir_str}/{}/{rc_spec_file}", step.dir());
-    fs::copy(&rc_spec_from, &rc_spec_to)
-        .await
-        .expect("cp should work");
+    fs::copy(&rc_spec_from, &rc_spec_to).await?;
+
     specs.push(rc_spec_to);
 
     // Generate snapshots and copy chain-specs for all parachains
@@ -422,17 +425,14 @@ pub async fn generate_artifacts(
             para_spec_file
         );
         let para_spec_to = format!("{global_base_dir_str}/{}/{}", step.dir(), para_spec_file);
-        fs::copy(&para_spec_from, &para_spec_to)
-            .await
-            .expect("cp should work");
+        fs::copy(&para_spec_from, &para_spec_to).await?;
+
         specs.push(para_spec_to);
     }
 
     // generate custom config
     let from_config_path = format!("{global_base_dir_str}/{}/config.toml", step.dir_from());
-    let config = fs::read_to_string(&from_config_path)
-        .await
-        .expect("read config file should work");
+    let config = fs::read_to_string(&from_config_path).await?;
 
     let db_snaps_in_file: Vec<(usize, &str)> = config.match_indices("db_snapshot").collect();
     let needs_to_insert_db = db_snaps_in_file.len() != snaps.len();
     let toml_config = config
@@ -464,9 +464,7 @@ pub async fn generate_artifacts(
 
     // write config in 'dir'
     let config_toml_path = format!("{global_base_dir_str}/{}/config.toml", step.dir());
-    fs::write(config_toml_path, &toml_config)
-        .await
-        .expect("create config.toml should works");
+    fs::write(config_toml_path, &toml_config).await?;
 
     Ok(())
 }
@@ -483,21 +481,15 @@ pub async fn clean_up_dir_for_step(
 
     // if we already have a debug path, remove it
     if let Ok(true) = fs::try_exists(&debug_path).await {
-        fs::remove_dir_all(&debug_path)
-            .await
-            .expect("remove debug dir should works");
+        fs::remove_dir_all(&debug_path).await?;
     }
 
     let step_path = format!("{global_base_dir_str}/{}", step.dir());
-    fs::rename(&step_path, &debug_path)
-        .await
-        .expect("rename dir should works");
+    fs::rename(&step_path, &debug_path).await?;
     info!("renamed dir from {step_path} to {debug_path}");
 
     // create the step dir again
-    fs::create_dir_all(&step_path)
-        .await
-        .expect("Create step dir should works");
+    fs::create_dir_all(&step_path).await?;
     info!("created dir {step_path}");
 
     // Build list of needed files dynamically based on parachains
@@ -526,9 +518,7 @@ pub async fn clean_up_dir_for_step(
         let from = format!("{debug_path}/{file}");
         let to = format!("{step_path}/{file}");
         info!("mv {from} {to}");
-        fs::rename(&from, &to)
-            .await
-            .unwrap_or_else(|e| panic!("Failed to move {from} to {to}: {e}"));
+        fs::rename(&from, &to).await?;
     }
 
     Ok(())
@@ -786,13 +776,13 @@ pub async fn spawn(
         .with_base_dir(&base_dir)
         .with_tear_down_on_failure(false)
         .build()
-        .expect("global settings should work");
+        .map_err(|e| anyhow!("Failed to build global settings: {:?}", e))?;
 
     let network_config = zombienet_configuration::NetworkConfig::load_from_toml_with_settings(
         &config_file,
         &global_settings,
     )
-    .unwrap();
+    .map_err(|e| anyhow!("Failed to load network config from TOML: {e}"))?;
 
     orchestrator
         .spawn(network_config)
diff --git a/src/main.rs b/src/main.rs
index e522742..3af87f9 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -3,6 +3,7 @@ use std::{
     time::Duration,
 };
 
+use anyhow::Context;
 use clap::Parser;
 use futures::StreamExt;
 use tracing::{debug, error, info, level_filters::LevelFilter, trace, warn};
@@ -29,7 +30,7 @@ use crate::config::Step;
 const STOP_FILE: &str = "stop.txt";
 
 /// Helpers fns
-async fn resolve_if_dir_exist(base_path: &Path, step: Step) {
+async fn resolve_if_dir_exist(base_path: &Path, step: Step) -> Result<(), anyhow::Error> {
     let base_path_str = base_path.to_string_lossy();
     let path_to_use = format!("{base_path_str}/{}", step.dir());
     let mut path_with_suffix = format!("{base_path_str}/{}", step.dir());
@@ -45,13 +46,15 @@ async fn resolve_if_dir_exist(base_path: &Path, step: Step) {
     if path_to_use != path_with_suffix {
         // spawn exist and we need to move the content
         warn!("'{}' dir exist, moving to {path_with_suffix}", step.dir());
-        fs::rename(&path_to_use, &path_with_suffix)
-            .await
-            .expect("mv should work");
+        fs::rename(&path_to_use, &path_with_suffix).await?;
     }
+
+    Ok(())
 }
 
-async fn ensure_startup_producing_blocks(network: &Network) {
+async fn ensure_startup_producing_blocks(
+    network: &Network,
+) -> Result<(), anyhow::Error> {
     // Check metrics for all parachains and their collators
     let parachains = network.parachains();
     for para in parachains {
@@ -59,25 +62,32 @@
             debug!("Waiting metrics for collator {}", collator.name());
             collator
                 .wait_metric_with_timeout("node_roles", |x| x > 1.0, 300_u64)
-                .await
-                .unwrap();
+                .await?;
         }
     }
 
     // ensure block production
     let client = network
         .get_node("alice")
-        .unwrap()
+        .context("Node 'alice' not found")?
         .wait_client::<subxt::PolkadotConfig>()
         .await
-        .unwrap();
-    let mut blocks = client.blocks().subscribe_finalized().await.unwrap().take(3);
+        .map_err(|e| anyhow::anyhow!("Failed to wait for client for 'alice': {e}"))?;
+    let mut blocks = client
+        .blocks()
+        .subscribe_finalized()
+        .await
+        .map_err(|e| anyhow::anyhow!("Failed to subscribe to finalized blocks: {e}"))?
+        .take(3);
 
     while let Some(block) = blocks.next().await {
-        info!("Block #{}", block.unwrap().header().number);
+        let block = block.map_err(|e| anyhow::anyhow!("Failed to get block: {e}"))?;
+        info!("Block #{}", block.header().number);
     }
 
     info!("🚀🚀🚀 network is up and running...");
+
+    Ok(())
 }
 
 async fn post_spawn_loop(
@@ -100,7 +110,7 @@ async fn post_spawn_loop(
             debug!("No collator found, monitoring only validators");
         }
 
-        monit_progress(alice, bob, collator_opt, Some(stop_file)).await;
+        monit_progress(alice, bob, collator_opt, Some(stop_file)).await?;
     } else {
         while let Ok(false) = fs::try_exists(&stop_file).await {
             tokio::time::sleep(Duration::from_secs(60)).await;
@@ -122,12 +132,8 @@ async fn tear_down_and_generate(
 
     if let Ok(true) = teardown_signal {
         // create the artifacts
-        doppelganger::generate_artifacts(base_path.clone(), step, &rc)
-            .await
-            .expect("generate should works");
-        doppelganger::clean_up_dir_for_step(base_path, step, &rc, &[])
-            .await
-            .expect("clean-up should works");
+        doppelganger::generate_artifacts(base_path.clone(), step, &rc).await?;
+        doppelganger::clean_up_dir_for_step(base_path, step, &rc, &[]).await?;
     }
 
     // signal that the teardown is completed
@@ -183,8 +189,7 @@ async fn main() -> Result<(), anyhow::Error> {
                 resolved_config.parachains,
                 &database,
             )
-            .await
-            .expect("bite should work");
+            .await?;
 
             if resolved_config.and_spawn {
                 let step = Step::Spawn;
@@ -194,13 +199,12 @@ async fn main() -> Result<(), anyhow::Error> {
                     resolved_config.base_path.to_string_lossy()
                 );
 
-                resolve_if_dir_exist(&resolved_config.base_path, step).await;
+                resolve_if_dir_exist(&resolved_config.base_path, step).await?;
 
                 let network =
                     doppelganger::spawn(step, resolved_config.base_path.as_path(), None, None)
-                        .await
-                        .expect("spawn should works");
+                        .await?;
 
-                ensure_startup_producing_blocks(&network).await;
+                ensure_startup_producing_blocks(&network).await?;
 
                 post_spawn_loop(&stop_file, &network, true).await?;
@@ -218,24 +222,19 @@ async fn main() -> Result<(), anyhow::Error> {
             let step: Step = step.into();
             let base_path_str = resolved_config.base_path.to_string_lossy();
 
-            if !fs::try_exists(format!("{base_path_str}/{}", step.dir_from()))
-                .await
-                .expect("try_exist should work")
-            {
+            if !fs::try_exists(format!("{base_path_str}/{}", step.dir_from())).await? {
                 println!("\t\x1b[91mThe 'bite' dir doesn't exist, please run the bite subcommand first.\x1b[0m");
                 println!("\tHelp: zombie-bite bite --help");
                 std::process::exit(1);
             }
 
-            resolve_if_dir_exist(&resolved_config.base_path, step).await;
+            resolve_if_dir_exist(&resolved_config.base_path, step).await?;
 
             let network =
-                doppelganger::spawn(step, resolved_config.base_path.as_path(), None, None)
-                    .await
-                    .expect("spawn should works");
+                doppelganger::spawn(step, resolved_config.base_path.as_path(), None, None).await?;
 
-            ensure_startup_producing_blocks(&network).await;
+            ensure_startup_producing_blocks(&network).await?;
 
             // STOP file
             let stop_file = format!("{base_path_str}/{STOP_FILE}");
@@ -252,9 +251,7 @@ async fn main() -> Result<(), anyhow::Error> {
             let rc = Relaychain::new(&relay);
             let step: Step = step.into();
             let base_path = get_base_path(base_path);
-            doppelganger::generate_artifacts(base_path, step, &rc)
-                .await
-                .expect("generate artifacts should work")
+            doppelganger::generate_artifacts(base_path, step, &rc).await?
         }
         Commands::CleanUpDir {
             relay,
@@ -264,9 +261,7 @@ async fn main() -> Result<(), anyhow::Error> {
             let rc = Relaychain::new(&relay);
             let step: Step = step.into();
             let base_path = get_base_path(base_path);
-            doppelganger::clean_up_dir_for_step(base_path, step, &rc, &[])
-                .await
-                .expect("clean-up should works");
+            doppelganger::clean_up_dir_for_step(base_path, step, &rc, &[]).await?;
         }
     };
 
     Ok(())
diff --git a/src/monit.rs b/src/monit.rs
index aca7b9d..3613021 100644
--- a/src/monit.rs
+++ b/src/monit.rs
@@ -42,15 +42,13 @@ pub async fn monit_progress(
     bob: &NetworkNode,
     collator: Option<&NetworkNode>,
     stop_file: Option<&str>,
-) {
+) -> Result<(), anyhow::Error> {
     // monitoring block production every 15 mins
-    let mut alice_block = progress(alice, 0).await.expect("first check should works");
-    let mut bob_block = progress(bob, 0).await.expect("first check should works");
+    let mut alice_block = progress(alice, 0).await?;
+    let mut bob_block = progress(bob, 0).await?;
 
     let mut collator_block = if let Some(collator) = collator {
-        progress(collator, 0)
-            .await
-            .expect("first check should works")
+        progress(collator, 0).await?
     } else {
         // no collator deployed.
         -1_f64
@@ -109,4 +107,6 @@ pub async fn monit_progress(
             check_progress().await
         }
     }
+
+    Ok(())
 }
diff --git a/src/sync.rs b/src/sync.rs
index 8779839..22396f5 100644
--- a/src/sync.rs
+++ b/src/sync.rs
@@ -103,7 +103,7 @@ pub async fn sync_para(
     info_path: impl AsRef<str>,
     maybe_target_header: Option<String>,
     database: &str,
-) -> Result<(DynNode, String, String, String), ()> {
+) -> Result<(DynNode, String, String, String), anyhow::Error> {
     let chain = parachain.as_chain_string(&relaychain.as_chain_string());
     let para_id_str = parachain.id().to_string();
     let sync_db_path = format!(
@@ -142,13 +142,10 @@ pub async fn sync_para(
     let dest_for_paseo = format!("{}/asset-hub-paseo.json", ns.base_dir().to_string_lossy(),);
     let chain_arg = if chain == "asset-hub-paseo" {
         // get chain spec from https://paseo-r2.zondax.ch/chain-specs/paseo-asset-hub.json
-        let response = reqwest::get(PASEO_ASSET_HUB_SPEC_URL)
-            .await
-            .unwrap_or_else(|_| panic!("Create file {dest_for_paseo} should work"));
-        let mut file = std::fs::File::create(&dest_for_paseo)
-            .unwrap_or_else(|_| panic!("Create file {dest_for_paseo} should work"));
-        let mut content = Cursor::new(response.bytes().await.expect("Create cursor should works."));
-        std::io::copy(&mut content, &mut file).expect("Copy bytes should works.");
+        let response = reqwest::get(PASEO_ASSET_HUB_SPEC_URL).await?;
+        let mut file = std::fs::File::create(&dest_for_paseo)?;
+        let mut content = Cursor::new(response.bytes().await?);
+        std::io::copy(&mut content, &mut file)?;
         dest_for_paseo.as_str()
     } else {
         chain.as_ref()
@@ -187,20 +184,20 @@ pub async fn sync_para(
         .env(env);
 
     info!("🔎 sync para opts: {:?}", opts);
-    let sync_node = ns.spawn_node(&opts).await.unwrap();
+    let sync_node = ns.spawn_node(&opts).await?;
 
     let metrics_url = format!("http://127.0.0.1:{metrics_random_port}/metrics");
     debug!("prometheus link http://127.0.0.1:{metrics_random_port}/metrics");
     info!("📓 sync para logs: {}", sync_node.log_cmd());
 
     wait_ws_ready(&metrics_url).await.unwrap();
-    let url = reqwest::Url::try_from(metrics_url.as_str()).unwrap();
+    let url = reqwest::Url::try_from(metrics_url.as_str())?;
 
     match wait_sync(url).await {
         Ok(_) => info!("✅ Synced (chain: {}), stopping node.", chain),
         Err(e) => {
             error!("❌ Sync failed for parachain {}: {}", chain, e);
-            return Err(());
+            return Err(e);
         }
     }
     // we should just paused
@@ -208,7 +205,6 @@ pub async fn sync_para(
     Ok((
         sync_node,
         sync_db_path,
-        //chain.as_ref().to_string(),
         chain_arg.to_string(),
         para_head_path,
     ))
diff --git a/src/utils.rs b/src/utils.rs
index 32a9ebd..4ec089e 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -4,7 +4,7 @@
 use std::path::{Path, PathBuf};
 use std::str::FromStr;
 
-use anyhow::anyhow;
+use anyhow::{anyhow, Context};
 use serde::de::DeserializeOwned;
 use serde::{Deserialize, Serialize};
 use serde_json::json;
@@ -320,14 +320,14 @@ pub fn para_id_for_map_hash(para_id: u32) -> String {
 
 pub async fn localize_config(config_path: impl AsRef<str>) -> Result<(), anyhow::Error> {
     let config_path = PathBuf::from_str(config_path.as_ref())?;
-    let base_path = config_path.parent().unwrap();
+    let base_path = config_path
+        .parent()
+        .context("config path should have a parent")?;
 
     let mut localized = false;
 
     // read config
-    let config_content = fs::read_to_string(&config_path)
-        .await
-        .expect("read config should works");
+    let config_content = fs::read_to_string(&config_path).await?;
 
     let mut config_modified = vec![];
     for line in config_content.lines() {
         match line {
@@ -371,11 +371,9 @@ pub async fn localize_config(config_path: impl AsRef<str>) -> Result<(), anyhow:
             &config_path,
             &format!("{}/original-config.toml", &base_path.to_string_lossy()),
         )
-        .await
-        .expect("rename should works");
-        fs::write(&config_path, config_modified.join("\n"))
-            .await
-            .expect("write should works");
+        .await?;
+
+        fs::write(&config_path, config_modified.join("\n")).await?;
     }
 
     Ok(())