Selaa lähdekoodia

error handling

Thomas 1 vuosi sitten
vanhempi
commit
c085500a20

+ 6 - 0
README.md

@@ -32,3 +32,9 @@ jq -L ./jq_filters -C 'include "jq_variants"; [.data[] | select(contig("chrM") a
 ```
 find /data/longreads_basic_pipe/ -name "*_diag_hs1_info.json" -type f -exec sh -c 'basename $(dirname $(dirname "{}")) | tr -d "\n"' \; -printf "\t" -exec jq -L ./jq_filters -r 'include "jq_bam"; contig_coverage("chrM")' {} \;
 ```
+
+### Reading log files
+```
+zcat /data/longreads_basic_pipe/ID/log/deepsomatic/deepvariant_e7ed1.log.gz | jq -r '.log'
+
+```

+ 0 - 2
jq_filters/jq_variants.jq

@@ -39,5 +39,3 @@ def contig(contig_str):
 def get_value(key):
 	map(select(has($key)) | .[$key]) | first // null;
 
-def vcf_format(vcf_variants):
-	vcf_variants | select()

+ 3 - 1
src/annotation/vep.rs

@@ -1,7 +1,7 @@
 use anyhow::anyhow;
 use hashbrown::HashMap;
 use itertools::Itertools;
-use log::warn;
+use log::{debug, warn};
 use serde::{Deserialize, Serialize};
 use std::{
     cmp::{Ordering, Reverse},
@@ -446,6 +446,8 @@ impl FromStr for VEPExtra {
 
 // VEP need plugin Downstream and SpliceRegion /home/prom/.vep/Plugins
 pub fn run_vep(in_path: &str, out_path: &str) -> anyhow::Result<()> {
+    debug!("Run VEP for {in_path} and output {out_path}");
+
     let bin_dir = "/data/tools/ensembl-vep";
     let dir_cache = "/data/ref/hs1/vepcache/";
     let fasta = "/data/ref/hs1/chm13v2.0.fa";

+ 13 - 15
src/callers/deep_variant.rs

@@ -131,36 +131,34 @@ impl CallerCat for DeepVariant {
             tumoral_name,
             ..
         } = &self.config;
-        let caller = if *normal_name == self.time {
+        if *normal_name == self.time {
             Annotation::Callers(Caller::DeepVariant, Sample::SoloConstit)
         } else if *tumoral_name == self.time {
             Annotation::Callers(Caller::DeepVariant, Sample::SoloTumor)
         } else {
             panic!("Error in time_point name: {}", self.time);
-        };
-        caller
+        }
     }
 }
 
 impl Variants for DeepVariant {
     fn variants(&self, annotations: &Annotations) -> anyhow::Result<VariantCollection> {
         let caller = self.caller_cat();
-        info!(
-            "Loading variants from {}: {}",
-            caller, self.vcf_passed
-        );
-        let variants = read_vcf(&self.vcf_passed)?;
+        info!("Loading variants from {}: {}", caller, self.vcf_passed);
+        let variants = read_vcf(&self.vcf_passed)
+            .map_err(|e| anyhow::anyhow!("Error while reading VCF {}.\n{e}", self.vcf_passed))?;
         variants.par_iter().for_each(|v| {
-            annotations.insert_update(v.hash(), &vec![caller.clone()]);
+            annotations.insert_update(v.hash(), &[caller.clone()]);
         });
-        info!(
-            "{}, {} variants loaded.",
-            caller,
-            variants.len()
-        );
+        info!("{}, {} variants loaded.", caller, variants.len());
         Ok(VariantCollection {
             variants,
-            vcf: Vcf::new(self.vcf_passed.clone().into())?,
+            vcf: Vcf::new(self.vcf_passed.clone().into()).map_err(|e| {
+                anyhow::anyhow!(
+                    "Error while creating a VCF representation for {}.\n{e}",
+                    self.vcf_passed
+                )
+            })?,
             caller,
         })
     }

+ 5 - 0
src/config.rs

@@ -6,6 +6,7 @@ pub struct Config {
     pub reference: String,
     pub reference_name: String,
     pub dict_file: String,
+    pub docker_max_memory_go: u16,
     pub savana_bin: String,
     pub savana_threads: u8,
     pub tumoral_name: String,
@@ -61,6 +62,7 @@ pub struct Config {
     pub nanomonsv_passed_vcf: String,
     pub nanomonsv_solo_output_dir: String,
     pub nanomonsv_solo_passed_vcf: String,
+    pub somatic_pipe_force: bool,
 }
 
 // Here comes names that can't be changed from output of tools
@@ -83,6 +85,8 @@ impl Default for Config {
             reference_name: "hs1".to_string(),
             dict_file: "/data/ref/hs1/chm13v2.0.dict".to_string(),
 
+            docker_max_memory_go: 400,
+
             // File structure
             result_dir: "/data/longreads_basic_pipe".to_string(),
 
@@ -168,6 +172,7 @@ impl Default for Config {
                 .to_string(),
 
             // Pipe
+            somatic_pipe_force: true,
             solo_min_constit_depth: 5,
             solo_max_alt_constit: 1,
             min_shannon_entropy: 1.0,

+ 29 - 1
src/lib.rs

@@ -39,7 +39,7 @@ mod tests {
     use positions::{overlaps_par, GenomePosition, GenomeRange};
     use rayon::prelude::*;
     use runners::Run;
-    use variant::variant::{VcfVariant, Variants};
+    use variant::{variant::{Variants, VcfVariant}, variant_collection};
 
     use self::{collection::pod5::{FlowCellCase, Pod5Collection}, commands::dorado, config::Config};
     use super::*;
@@ -623,6 +623,7 @@ mod tests {
         }
 
     }
+
     #[test]
     fn run_somatic() -> anyhow::Result<()> {
         init();
@@ -646,4 +647,31 @@ mod tests {
         }
         Ok(())
     }
+
+    // #[test]
+    // fn run_somatic() -> anyhow::Result<()> {
+    //     init();
+    //     let id = "ADJAGBA";
+    //     let mut config = Config::default();
+    //     config.somatic_pipe_force = true;
+    //     match Somatic::initialize(id, config)?.run() {
+    //         Ok(_) => (),
+    //         Err(e) => error!("{id} {e}"),
+    //     };
+    //     Ok(())
+    // }
+    
+    #[test]
+    fn load_variants() -> anyhow::Result<()> {
+        init();
+        let id = "ADJAGBA";
+        let config = Config::default();
+        let path = format!("{}/{id}/diag/somatic_variants.json.gz", config.result_dir);
+        let variants = variant_collection::Variants::load_from_json(&path)?;
+        println!("n variants {}", variants.data.len());
+
+        let n_vep: usize = variants.data.iter().map(|v| v.vep().len()).sum();
+        println!("VEP: {n_vep}");
+        Ok(())
+    }
 }

+ 21 - 9
src/pipes/somatic.rs

@@ -1,5 +1,5 @@
 use itertools::Itertools;
-use log::info;
+use log::{debug, info};
 use std::{
     collections::HashMap,
     fs::{self, File},
@@ -19,7 +19,7 @@ use crate::{
     init_solo_callers, init_somatic_callers,
     runners::Run,
     variant::{
-        variant::{load_variants, CallerBox},
+        variant::{load_variants, run_variants, CallerBox},
         variant_collection::{ExternalAnnotation, VariantCollection, Variants},
     },
 };
@@ -238,7 +238,7 @@ impl Run for Somatic {
 
         let result_json = format!("{}/somatic_variants.json.gz", config.tumoral_dir(&id));
 
-        if Path::new(&result_json).exists() {
+        if Path::new(&result_json).exists() && !config.somatic_pipe_force {
             return Err(anyhow::anyhow!("already exists"));
         }
 
@@ -257,9 +257,13 @@ impl Run for Somatic {
         info!("Initialization of callers...");
 
         let mut callers = init_somatic_callers!(
-            &id, &config, ClairS, NanomonSV, // Severus,
+            &id,
+            &config,
+            ClairS,
+            NanomonSV,
+            Severus,
             Savana,
-            // DeepSomatic
+            DeepSomatic
         );
 
         callers.extend(init_solo_callers!(
@@ -272,12 +276,17 @@ impl Run for Somatic {
         ));
 
         // Loading
+        info!("Running variants callers.");
+        run_variants(&mut callers)?;
+
         info!("Loading variants.");
-        let mut variants_collections = load_variants(&mut callers, &annotations)?;
+        let mut variants_collections = load_variants(&mut callers, &annotations)
+            .map_err(|e| anyhow::anyhow!("Error while loading variants\n{e}"))?;
 
-        let clairs_germline =
-            ClairS::initialize(&id, self.config.clone())?.germline(&annotations)?;
-        variants_collections.push(clairs_germline);
+        info!("Loading Germline");
+        // let clairs_germline =
+        //     ClairS::initialize(&id, self.config.clone())?.germline(&annotations)?;
+        // variants_collections.push(clairs_germline);
 
         let mut somatic_stats = SomaticStats::init(&variants_collections);
         info!(
@@ -436,6 +445,7 @@ impl Run for Somatic {
             "{} variants filtered, with constit alt <= max contig alt ({}) and in GnomAD.",
             somatic_stats.n_high_alt_constit_gnomad, self.config.solo_max_alt_constit
         );
+        // TODO: These stats don't capture filter metrics!
         annotations
             .callers_stat(Some(Box::new(|v| {
                 matches!(
@@ -492,6 +502,8 @@ impl Run for Somatic {
             },
         );
 
+        annotations.vep_stats()?;
+
         info!("Final unique variants: {}", variants.data.len());
         variants.save_to_json(&result_json)?;
 

+ 4 - 11
src/runners.rs

@@ -15,7 +15,7 @@ use log::{info, warn};
 use serde::{Deserialize, Serialize};
 use uuid::Uuid;
 
-use crate::DOCKER_ID;
+use crate::{config::Config, DOCKER_ID};
 
 pub trait Run {
     fn run(&mut self) -> anyhow::Result<()>;
@@ -48,15 +48,6 @@ pub struct RunReport {
 }
 
 impl RunReport {
-    // pub fn save_to_file(&self, file_prefix: &str) -> std::io::Result<()> {
-    //     let json_data = serde_json::to_string_pretty(self).expect("Failed to serialize RunReport");
-    //     let uuid = Uuid::new_v4().to_string()[..5].to_string();
-    //     let file_path = format!("{}{}.log", file_prefix, uuid);
-    //     let mut file = File::create(&file_path)?;
-    //     file.write_all(json_data.as_bytes())?;
-    //     Ok(())
-    // }
-
     /// Serialize the RunReport to a JSON string and save to file_prefix.log.gz
     pub fn save_to_file(&self, file_prefix: &str) -> std::io::Result<()> {
         let json_data = serde_json::to_string_pretty(self).expect("Failed to serialize RunReport");
@@ -106,7 +97,9 @@ impl Run for DockerRun {
         });
 
         // limit memory
-        self.args.insert(0, "--memory=400g".to_string());
+        let c = Config::default();
+        self.args
+            .insert(1, format!("--memory={}g", c.docker_max_memory_go));
 
         // Spawn the main command
         let output = Command::new("docker")

+ 10 - 9
src/variant/variant.rs

@@ -895,21 +895,22 @@ macro_rules! init_solo_callers {
     };
 }
 
+pub fn run_variants(iterable: &mut [CallerBox]) -> anyhow::Result<()> {
+    iterable
+        .iter_mut()
+        .try_for_each(|runner| runner.run())
+        .map_err(|e| anyhow::anyhow!("Error in run variants.\n{e}"))
+}
+
 pub fn load_variants(
     iterable: &mut [CallerBox],
     annotations: &Annotations,
 ) -> anyhow::Result<Vec<VariantCollection>> {
-    // iterable
-    //     .iter_mut()
-    //     .try_for_each(|runner| runner.run())?;
-
-    // Then, collect variants from all items in parallel
-    let variants: Vec<VariantCollection> = iterable
+    iterable
         .par_iter()
         .map(|runner| runner.variants(annotations))
-        .collect::<anyhow::Result<Vec<_>>>()?;
-
-    Ok(variants)
+        .collect::<anyhow::Result<Vec<_>>>()
+        .map_err(|e| anyhow::anyhow!("Error in load variants.\n{e}"))
 }
 
 pub fn parallel_intersection<T: Hash + Eq + Clone + Send + Sync>(

+ 151 - 85
src/variant/variant_collection.rs

@@ -2,12 +2,13 @@ use std::{
     collections::{HashMap, HashSet},
     fs::{self, File},
     io::Write,
+    path::Path,
 };
 
 use anyhow::Context;
 use bgzip::{BGZFReader, BGZFWriter};
 use csv::ReaderBuilder;
-use log::{debug, info, warn};
+use log::{debug, error, info, warn};
 use rayon::prelude::*;
 use serde::{Deserialize, Serialize};
 use uuid::Uuid;
@@ -207,6 +208,21 @@ impl PartialEq for Variant {
     }
 }
 
+impl Variant {
+    pub fn vep(&self) -> Vec<VEP> {
+        self.annotations
+            .iter()
+            .flat_map(|a| {
+                if let Annotation::VEP(v) = a {
+                    v.to_vec()
+                } else {
+                    vec![]
+                }
+            })
+            .collect()
+    }
+}
+
 #[derive(Debug, Default, Serialize, Deserialize)]
 pub struct Variants {
     pub data: Vec<Variant>,
@@ -572,6 +588,7 @@ impl ExternalAnnotation {
     ) -> anyhow::Result<()> {
         unfound.par_sort();
         unfound.dedup();
+        info!("{} variants to annotate with VEP.", unfound.len());
 
         let header = vcf_header("/data/ref/hs1/chm13v2.0.dict")?.join("\n");
 
@@ -583,87 +600,23 @@ impl ExternalAnnotation {
         let min_chunk_size = 1000;
         let max_chunks = 150;
 
-        let mut results = if !unfound.is_empty() {
+        let mut results: Vec<(Hash128, Vec<VEP>)> = if !unfound.is_empty() {
             let optimal_chunk_size = unfound.len().div_ceil(max_chunks as usize);
             let optimal_chunk_size = optimal_chunk_size.max(min_chunk_size);
 
+            debug!("{} chunks to process.", unfound.len() / optimal_chunk_size);
             unfound
                 .par_chunks(optimal_chunk_size)
-                .flat_map(|chunk| -> anyhow::Result<Vec<_>> {
-                    let in_tmp = temp_file_path("vcf")?.to_str().unwrap().to_string();
-                    let out_vep = temp_file_path("_vep.txt")?.to_str().unwrap().to_string();
-                    let out_summary = format!("{out_vep}_summary.html");
-                    let out_warnings = format!("{out_vep}_warnings.txt");
-
-                    // Write input VCF
-                    let mut vcf = File::create(&in_tmp)?;
-                    writeln!(vcf, "{}", header)?;
-                    for (i, row) in chunk.iter().enumerate() {
-                        writeln!(
-                            vcf,
-                            "{}\t{}\t{}\t{}\t{}\t.\tPASS\t.\t.\t.",
-                            row.position.contig(),
-                            row.position.position + 1, // vcf
-                            i + 1,
-                            row.reference,
-                            row.alternative
-                        )?;
-                    }
-
-                    run_vep(&in_tmp, &out_vep)?;
-
-                    let mut reader_vep = ReaderBuilder::new()
-                        .delimiter(b'\t')
-                        .has_headers(false)
-                        .comment(Some(b'#'))
-                        .flexible(true)
-                        .from_reader(fs::File::open(out_vep.clone())?);
-
-                    let mut lines: HashMap<u64, Vec<VepLine>> = HashMap::new();
-                    for line in reader_vep.deserialize() {
-                        let line: VepLine = line.context("Failed to deserialize VepLine")?;
-                        let key = line
-                            .uploaded_variation
-                            .parse::<u64>()
-                            .context("Failed to parse uploaded_variation as u64")?;
-
-                        lines.entry(key).or_default().push(line);
-                    }
-
-                    fs::remove_file(in_tmp)?;
-                    fs::remove_file(out_vep)?;
-
-                    let mut n_not_vep = 0;
-                    let mut chunk_results: Vec<(Hash128, Vec<VEP>)> = Vec::new();
-
-                    chunk.iter().enumerate().for_each(|(i, entry)| {
-                        let k = (i + 1) as u64;
-
-                        if let Some(vep_lines) = lines.get(&k) {
-                            if let Ok(veps) = vep_lines.iter().map(VEP::try_from).collect() {
-                                chunk_results.push((entry.hash(), veps));
-                            }
-                        } else {
-                            warn!(
-                                "No VEP entry for {}:{}>{}",
-                                entry.position.to_string(),
-                                entry.reference.to_string(),
-                                entry.alternative.to_string()
-                            );
-                            n_not_vep += 1;
-                        }
-                    });
-
-                    if n_not_vep > 0 {
-                        debug!("{n_not_vep} variants not annotated by VEP.");
-                        let warnings = fs::read_to_string(&out_warnings)
-                            .context(format!("Can't read VEP warnings: {out_warnings}"))?;
-                        warn!("VEP warnings:\n{warnings}");
-                    }
-                    fs::remove_file(out_warnings)?;
-                    fs::remove_file(out_summary)?;
-                    Ok(chunk_results)
+                .enumerate()
+                .map(|(chunk_i, chunk)| {
+                    debug!("Processing chunk {chunk_i}");
+                    process_vep_chunk(chunk, &header).map_err(|e| {
+                        error!("Error processing chunk {chunk_i}: {e}");
+                        e
+                    })
                 })
+                .collect::<Result<Vec<_>, _>>()? // Collect results into a Result<Vec<_>>
+                .into_iter()
                 .flatten()
                 .collect::<Vec<_>>()
         } else {
@@ -677,13 +630,23 @@ impl ExternalAnnotation {
             let results_sv = sv
                 .par_chunks(optimal_chunk_size)
                 .flat_map(|chunk| -> anyhow::Result<Vec<_>> {
-                    let in_tmp = temp_file_path(".vcf")?.to_str().unwrap().to_string();
-                    let out_vep = temp_file_path("_vep.txt")?.to_str().unwrap().to_string();
+                    let in_tmp = temp_file_path(".vcf")
+                        .context("Can't create tmp path for in tmp")?
+                        .to_str()
+                        .unwrap()
+                        .to_string();
+                    let out_vep = temp_file_path("_vep.txt")
+                        .context("Can't create tmp path for out vep")?
+                        .to_str()
+                        .unwrap()
+                        .to_string();
+
                     let out_summary = format!("{out_vep}_summary.html");
                     let out_warnings = format!("{out_vep}_warnings.txt");
 
                     // Write input VCF
-                    let mut vcf = File::create(&in_tmp)?;
+                    let mut vcf =
+                        File::create(&in_tmp).context("Can't create input vcf file for VEP.")?;
                     writeln!(vcf, "{}", header)?;
                     for (i, mut row) in chunk.iter().cloned().enumerate() {
                         row.id = (i + 1).to_string();
@@ -691,14 +654,16 @@ impl ExternalAnnotation {
                         writeln!(vcf, "{s}",)?;
                     }
 
-                    run_vep(&in_tmp, &out_vep)?;
+                    run_vep(&in_tmp, &out_vep).context("Error while running VEP.")?;
 
                     let mut reader_vep = ReaderBuilder::new()
                         .delimiter(b'\t')
                         .has_headers(false)
                         .comment(Some(b'#'))
                         .flexible(true)
-                        .from_reader(fs::File::open(out_vep.clone())?);
+                        .from_reader(
+                            fs::File::open(&out_vep).context("Can't open VEP result file.")?,
+                        );
 
                     let mut lines: HashMap<u64, Vec<VepLine>> = HashMap::new();
                     for line in reader_vep.deserialize() {
@@ -711,8 +676,7 @@ impl ExternalAnnotation {
                         lines.entry(key).or_default().push(line);
                     }
 
-                    fs::remove_file(in_tmp)?;
-                    fs::remove_file(out_vep)?;
+                    // fs::remove_file(&in_tmp).context(format!("Can't remove file {in_tmp}"))?;
 
                     let mut n_not_vep = 0;
                     let mut chunk_results: Vec<(Hash128, Vec<VEP>)> = Vec::new();
@@ -734,6 +698,7 @@ impl ExternalAnnotation {
                             n_not_vep += 1;
                         }
                     });
+                    // fs::remove_file(&out_vep).context(format!("Can't remove file {out_vep}"))?;
 
                     if n_not_vep > 0 {
                         debug!("{n_not_vep} variants not annotated by VEP.");
@@ -741,8 +706,14 @@ impl ExternalAnnotation {
                             .context(format!("Can't read VEP warnings: {out_warnings}"))?;
                         warn!("VEP warnings:\n{warnings}");
                     }
-                    fs::remove_file(out_warnings)?;
-                    fs::remove_file(out_summary)?;
+                    if Path::new(&out_warnings).exists() {
+                        fs::remove_file(&out_warnings)
+                            .context(format!("Can't remove file {out_warnings}"))?;
+                    }
+                    if Path::new(&out_summary).exists() {
+                        fs::remove_file(&out_summary)
+                            .context(format!("Can't remove file {out_summary}"))?;
+                    }
                     Ok(chunk_results)
                 })
                 .flatten()
@@ -750,6 +721,7 @@ impl ExternalAnnotation {
 
             results.extend(results_sv);
         }
+        info!("{} total variants annotated by VEP.", results.len());
 
         for (hash, veps) in results {
             // self.update_database(hash, "vep", &serde_json::to_vec(&veps)?)?;
@@ -774,3 +746,97 @@ impl ExternalAnnotation {
         Ok(())
     }
 }
+
+fn process_vep_chunk(
+    chunk: &[VcfVariant],
+    header: &str,
+) -> anyhow::Result<Vec<(Hash128, Vec<VEP>)>> {
+    let in_tmp = temp_file_path("vcf")?
+        .to_str()
+        .ok_or_else(|| anyhow::anyhow!("Failed to convert temp file path to string"))?
+        .to_string();
+    let out_vep = temp_file_path("_vep.txt")?
+        .to_str()
+        .ok_or_else(|| anyhow::anyhow!("Failed to convert temp file path to string"))?
+        .to_string();
+
+    let out_summary = format!("{out_vep}_summary.html");
+    let out_warnings = format!("{out_vep}_warnings.txt");
+
+    let mut vcf = File::create(&in_tmp)?; // If this fails, the error is propagated.
+    writeln!(vcf, "{}", header)?; // If this fails, the error is propagated.
+
+    for (i, row) in chunk.iter().enumerate() {
+        writeln!(
+            vcf,
+            "{}\t{}\t{}\t{}\t{}\t.\tPASS\t.\t.\t.",
+            row.position.contig(),
+            row.position.position + 1,
+            i + 1,
+            row.reference,
+            row.alternative
+        )?;
+    }
+
+    if let Err(e) = run_vep(&in_tmp, &out_vep) {
+        error!("VEP error: {e}");
+        return Err(anyhow::anyhow!("VEP execution failed: {}", e)); // Propagate the error.
+    }
+
+    let mut reader_vep = ReaderBuilder::new()
+        .delimiter(b'\t')
+        .has_headers(false)
+        .comment(Some(b'#'))
+        .flexible(true)
+        .from_reader(fs::File::open(&out_vep)?); // If this fails, the error is propagated.
+
+    let mut lines: HashMap<u64, Vec<VepLine>> = HashMap::new();
+    for line in reader_vep.deserialize() {
+        let line: VepLine = line.context("Failed to deserialize VepLine")?; // Propagate the error.
+        let key = line
+            .uploaded_variation
+            .parse::<u64>()
+            .context("Failed to parse uploaded_variation as u64")?; // Propagate the error.
+        lines.entry(key).or_default().push(line);
+    }
+
+    fs::remove_file(&in_tmp).context(format!("Can't remove file {in_tmp}"))?;
+
+    let mut n_not_vep = 0;
+    let mut chunk_results: Vec<(Hash128, Vec<VEP>)> = Vec::new();
+
+    chunk.iter().enumerate().for_each(|(i, entry)| {
+        let k = (i + 1) as u64;
+
+        if let Some(vep_lines) = lines.get(&k) {
+            if let Ok(veps) = vep_lines.iter().map(VEP::try_from).collect() {
+                chunk_results.push((entry.hash(), veps));
+            }
+        } else {
+            warn!(
+                "No VEP entry for {}\t{}\t{}",
+                entry.position.to_string(),
+                entry.reference.to_string(),
+                entry.alternative.to_string()
+            );
+            n_not_vep += 1;
+        }
+    });
+    fs::remove_file(&out_vep).context(format!("Can't remove file {out_vep}"))?;
+
+    if n_not_vep > 0 {
+        debug!("{n_not_vep} variants not annotated by VEP.");
+        let warnings = fs::read_to_string(&out_warnings)
+            .context(format!("Can't read VEP warnings: {out_warnings}"))?;
+        warn!("VEP warnings:\n{warnings}");
+    }
+
+    if Path::new(&out_warnings).exists() {
+        fs::remove_file(&out_warnings).context(format!("Can't remove file {out_warnings}"))?;
+    }
+    if Path::new(&out_summary).exists() {
+        fs::remove_file(&out_summary).context(format!("Can't remove file {out_summary}"))?;
+    }
+
+    Ok(chunk_results) // Return the successful result.
+}