refactor(errors)!: refactored errors
jspaezp committed Oct 31, 2024
1 parent 37e39a5 commit c4cde15
Showing 7 changed files with 129 additions and 161 deletions.
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -20,12 +20,12 @@ tracing-subscriber = { version = "0.3.18", features = [
"env-filter",
] }
tracing-bunyan-formatter = "0.3.9"
tracing-chrome = "0.7.2"


# These are only used for benchmarks
rand = { version = "0.8.5", optional = true }
rand_chacha = { version = "0.3.1", optional = true }
tracing-chrome = "0.7.2"


[features]
58 changes: 58 additions & 0 deletions src/errors.rs
@@ -0,0 +1,58 @@
use crate::models::frames::expanded_frame::FrameProcessingConfig;
use std::fmt::Display;
use timsrust::TimsRustError;

#[derive(Debug)]
pub enum TimsqueryError {
DataReadingError(DataReadingError),
DataProcessingError(DataProcessingError),
Other(String),
}

pub type Result<T> = std::result::Result<T, TimsqueryError>;

impl Display for TimsqueryError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}", self)
}
}

impl TimsqueryError {
pub fn custom(msg: impl Display) -> Self {
Self::Other(msg.to_string())
}
}

#[derive(Debug)]
pub enum DataReadingError {
UnsupportedDataError(UnsupportedDataError),
    TimsRustError(TimsRustError), // Why doesn't timsrust error derive clone?
}

impl From<UnsupportedDataError> for DataReadingError {
fn from(e: UnsupportedDataError) -> Self {
DataReadingError::UnsupportedDataError(e)
}
}

#[derive(Debug)]
pub enum UnsupportedDataError {
NoMS2DataError,
}

#[derive(Debug)]
pub enum DataProcessingError {
CentroidingError(FrameProcessingConfig),
}

impl<T: Into<DataReadingError>> From<T> for TimsqueryError {
fn from(e: T) -> Self {
TimsqueryError::DataReadingError(e.into())
}
}

impl<T: Into<TimsRustError>> From<T> for DataReadingError {
fn from(e: T) -> Self {
DataReadingError::TimsRustError(e.into())
}
}
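
To make the intended ergonomics concrete, here is a minimal, hypothetical sketch of calling code. It is not part of this commit and it assumes the crate is named `timsquery`; the point is that the `From` impls above let errors bubble up into `TimsqueryError` with a plain `.into()` or `?`, without manual wrapping.

use timsquery::errors::{Result, TimsqueryError, UnsupportedDataError};

// Hypothetical caller: the function returns the crate-wide Result alias, so
// an UnsupportedDataError (and, via the blanket impls, any TimsRustError) is
// lifted into TimsqueryError automatically.
fn require_ms2(has_ms2_data: bool) -> Result<()> {
    if !has_ms2_data {
        // UnsupportedDataError -> DataReadingError -> TimsqueryError
        return Err(UnsupportedDataError::NoMS2DataError.into());
    }
    Ok(())
}

fn main() {
    // `custom` wraps any Display value in TimsqueryError::Other.
    println!("{}", TimsqueryError::custom("illustrative failure"));
    println!("{:?}", require_ms2(false));
}
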
4 changes: 4 additions & 0 deletions src/lib.rs
@@ -8,7 +8,11 @@ pub use crate::traits::queriable_data::QueriableData;
pub use crate::traits::tolerance::{Tolerance, ToleranceAdapter};

// Declare modules
pub mod errors;
pub mod models;
pub mod queriable_tims_data;
pub mod traits;
pub mod utils;

// Re-export errors
pub use crate::errors::{DataProcessingError, DataReadingError, TimsqueryError};
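
A second hypothetical snippet (same `timsquery` crate-name assumption, not part of the commit) showing what the root-level re-exports buy a downstream user: the concrete error enums can be matched without naming the `errors` module.

use timsquery::{DataProcessingError, DataReadingError, TimsqueryError};

// Hypothetical helper mapping each error family to a short label.
fn classify(err: &TimsqueryError) -> &'static str {
    match err {
        TimsqueryError::DataReadingError(DataReadingError::UnsupportedDataError(_)) => {
            "unsupported data"
        }
        TimsqueryError::DataReadingError(DataReadingError::TimsRustError(_)) => "timsrust error",
        TimsqueryError::DataProcessingError(DataProcessingError::CentroidingError(_)) => {
            "centroiding failed"
        }
        TimsqueryError::Other(_) => "other",
    }
}

fn main() {
    println!("{}", classify(&TimsqueryError::custom("demo")));
}
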
139 changes: 51 additions & 88 deletions src/models/frames/expanded_frame.rs
@@ -1,23 +1,20 @@
use super::peak_in_quad::PeakInQuad;
use super::single_quad_settings::{
expand_quad_settings, ExpandedFrameQuadSettings, SingleQuadrupoleSetting,
};
use crate::errors::{Result, UnsupportedDataError};
use crate::sort_vecs_by_first;
use crate::utils::compress_explode::explode_vec;
use crate::utils::frame_processing::{lazy_centroid_weighted_frame, PeakArrayRefs};
use crate::utils::sorting::top_n;
use crate::utils::tolerance_ranges::{scan_tol_range, tof_tol_range};
use rayon::prelude::*;
use std::collections::HashMap;
use std::marker::PhantomData;
use std::sync::Arc;
use timsrust::converters::{Scan2ImConverter, Tof2MzConverter};
use timsrust::readers::{FrameReader, FrameReaderError};
use timsrust::{AcquisitionType, Frame, MSLevel, QuadrupoleSettings};

use super::peak_in_quad::PeakInQuad;
use crate::sort_vecs_by_first;
use crate::utils::compress_explode::explode_vec;
use crate::utils::frame_processing::{lazy_centroid_weighted_frame, PeakArrayRefs};
use crate::utils::sorting::top_n;
use crate::utils::tolerance_ranges::{scan_tol_range, tof_tol_range};
use timsrust::{
readers::{FrameReader, FrameReaderError, MetadataReaderError},
TimsRustError,
};
use tracing::instrument;
use tracing::{info, trace, warn};

@@ -301,14 +298,31 @@ pub fn par_expand_and_arrange_frames(
out
}

#[derive(Debug, Clone, Copy, PartialEq)]
pub struct CentroidingSettings {
ims_tol_pct: f64,
mz_tol_ppm: f64,
window_width: usize,
max_ms1_peaks: usize,
max_ms2_peaks: usize,
}

impl Default for CentroidingSettings {
fn default() -> Self {
CentroidingSettings {
ims_tol_pct: 1.5,
mz_tol_ppm: 15.0,
window_width: 3,
max_ms1_peaks: 100_000,
max_ms2_peaks: 20_000,
}
}
}

#[derive(Debug, Clone, Copy, PartialEq)]
pub enum FrameProcessingConfig {
Centroided {
ims_tol_pct: f64,
mz_tol_ppm: f64,
window_width: usize,
max_ms1_peaks: usize,
max_ms2_peaks: usize,
settings: CentroidingSettings,
ims_converter: Option<Scan2ImConverter>,
mz_converter: Option<Tof2MzConverter>,
},
@@ -322,33 +336,20 @@ impl FrameProcessingConfig {
mz_converter: Tof2MzConverter,
) -> Self {
match self {
FrameProcessingConfig::Centroided {
ims_tol_pct,
mz_tol_ppm,
window_width,
max_ms1_peaks,
max_ms2_peaks,
..
} => FrameProcessingConfig::Centroided {
ims_tol_pct,
mz_tol_ppm,
window_width,
max_ms1_peaks,
max_ms2_peaks,
ims_converter: Some(ims_converter),
mz_converter: Some(mz_converter),
},
FrameProcessingConfig::Centroided { settings, .. } => {
FrameProcessingConfig::Centroided {
settings,
ims_converter: Some(ims_converter),
mz_converter: Some(mz_converter),
}
}
FrameProcessingConfig::NotCentroided => FrameProcessingConfig::NotCentroided,
}
}

pub fn default_centroided() -> Self {
FrameProcessingConfig::Centroided {
ims_tol_pct: 1.5,
mz_tol_ppm: 15.0,
window_width: 3,
max_ms1_peaks: 100_000,
max_ms2_peaks: 20_000,
settings: Default::default(),
ims_converter: Default::default(),
mz_converter: Default::default(),
}
@@ -359,33 +360,8 @@ impl FrameProcessingConfig {
}
}

#[derive(Debug)]
pub enum DataReadingError {
CentroidingError(FrameProcessingConfig),
UnsupportedDataError(String),
TimsRustError(TimsRustError), // Why doesnt timsrust error derive clone?
}

impl From<TimsRustError> for DataReadingError {
fn from(e: TimsRustError) -> Self {
DataReadingError::TimsRustError(e)
}
}

impl From<MetadataReaderError> for DataReadingError {
fn from(e: MetadataReaderError) -> Self {
DataReadingError::TimsRustError(TimsRustError::MetadataReaderError(e))
}
}

impl From<FrameReaderError> for DataReadingError {
fn from(e: FrameReaderError) -> Self {
DataReadingError::TimsRustError(TimsRustError::FrameReaderError(e))
}
}

fn warn_and_skip_badframes(
frame_iter: impl ParallelIterator<Item = Result<Frame, FrameReaderError>>,
frame_iter: impl ParallelIterator<Item = std::result::Result<Frame, FrameReaderError>>,
) -> impl ParallelIterator<Item = Frame> {
frame_iter.filter_map(|x| {
// Log the info of the frame that broke ...
@@ -409,16 +385,11 @@ fn warn_and_skip_badframes(
pub fn par_read_and_expand_frames(
frame_reader: &FrameReader,
centroiding_config: FrameProcessingConfig,
) -> Result<
HashMap<Option<SingleQuadrupoleSetting>, Vec<ExpandedFrameSlice<SortedState>>>,
DataReadingError,
> {
) -> Result<HashMap<Option<SingleQuadrupoleSetting>, Vec<ExpandedFrameSlice<SortedState>>>> {
let dia_windows = match frame_reader.get_dia_windows() {
Some(dia_windows) => dia_windows,
None => {
return Err(DataReadingError::UnsupportedDataError(
"No dia windows found".to_string(),
))
return Err(UnsupportedDataError::NoMS2DataError.into());
}
};

Expand All @@ -430,19 +401,15 @@ pub fn par_read_and_expand_frames(

let expanded_frames = match centroiding_config {
FrameProcessingConfig::Centroided {
ims_tol_pct,
mz_tol_ppm,
window_width,
max_ms1_peaks: _max_ms1_peaks,
max_ms2_peaks,
settings,
ims_converter,
mz_converter,
} => par_expand_and_centroid_frames(
curr_iter,
ims_tol_pct,
mz_tol_ppm,
window_width,
max_ms2_peaks,
settings.ims_tol_pct,
settings.mz_tol_ppm,
settings.window_width,
settings.max_ms2_peaks,
&ims_converter.unwrap(),
&mz_converter.unwrap(),
),
@@ -430,19 +401,15 @@ pub fn par_read_and_expand_frames(
let ms1_iter = warn_and_skip_badframes(ms1_iter);
let expanded_ms1_frames = match centroiding_config {
FrameProcessingConfig::Centroided {
ims_tol_pct,
mz_tol_ppm,
window_width,
max_ms1_peaks,
max_ms2_peaks: _max_ms2_peaks,
settings,
ims_converter,
mz_converter,
} => par_expand_and_centroid_frames(
ms1_iter,
ims_tol_pct,
mz_tol_ppm,
window_width,
max_ms1_peaks,
settings.ims_tol_pct,
settings.mz_tol_ppm,
settings.window_width,
settings.max_ms1_peaks,
&ims_converter.unwrap(),
&mz_converter.unwrap(),
),
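
To illustrate how the new `CentroidingSettings` bundle travels inside `FrameProcessingConfig`, a hedged sketch follows. The module path is taken from the in-crate imports above, the `timsquery` crate name is an assumption, and the code is not part of the commit.

use timsquery::models::frames::expanded_frame::FrameProcessingConfig;

// Hypothetical helper choosing a processing mode. The converters expected by
// `with_converters` normally come from timsrust's MetadataReader, so none are
// constructed here.
fn pick_config(centroid: bool) -> FrameProcessingConfig {
    if centroid {
        // Carries CentroidingSettings::default(): 1.5 % IMS tolerance, 15 ppm
        // m/z tolerance, window width 3, 100_000 MS1 / 20_000 MS2 peak caps.
        FrameProcessingConfig::default_centroided()
    } else {
        FrameProcessingConfig::NotCentroided
    }
}

fn main() {
    println!("{:?}", pick_config(true));
}
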
20 changes: 6 additions & 14 deletions src/models/indices/expanded_raw_index/model.rs
@@ -1,8 +1,8 @@
use crate::errors::Result;
use crate::models::adapters::FragmentIndexAdapter;
use crate::models::elution_group::ElutionGroup;
use crate::models::frames::expanded_frame::{
par_read_and_expand_frames, DataReadingError, ExpandedFrameSlice, FrameProcessingConfig,
SortedState,
par_read_and_expand_frames, ExpandedFrameSlice, FrameProcessingConfig, SortedState,
};
use crate::models::frames::peak_in_quad::PeakInQuad;
use crate::models::frames::raw_peak::RawPeak;
@@ -19,7 +19,7 @@ use serde::Serialize;
use std::collections::HashMap;
use std::hash::Hash;
use std::time::Instant;
use timsrust::converters::{Frame2RtConverter, Scan2ImConverter, Tof2MzConverter};
use timsrust::converters::{Scan2ImConverter, Tof2MzConverter};
use timsrust::readers::{FrameReader, MetadataReader};
use tracing::info;
use tracing::instrument;
@@ -29,7 +29,6 @@ pub struct ExpandedRawFrameIndex {
bundled_ms1_frames: ExpandedSliceBundle,
bundled_frames: HashMap<SingleQuadrupoleSettingIndex, ExpandedSliceBundle>,
flat_quad_settings: Vec<SingleQuadrupoleSetting>,
rt_converter: Frame2RtConverter,
pub mz_converter: Tof2MzConverter,
pub im_converter: Scan2ImConverter,
adapter: FragmentIndexAdapter,
@@ -38,18 +37,15 @@ pub struct ExpandedRawFrameIndex {
#[derive(Debug, Clone)]
pub struct ExpandedSliceBundle {
slices: Vec<ExpandedFrameSlice<SortedState>>,
rts: Vec<f64>,
frame_indices: Vec<usize>,
}

impl ExpandedSliceBundle {
pub fn new(mut slices: Vec<ExpandedFrameSlice<SortedState>>) -> Self {
slices.sort_unstable_by(|a, b| a.rt.partial_cmp(&b.rt).unwrap());
let rts = slices.iter().map(|x| x.rt).collect();
let frame_indices = slices.iter().map(|x| x.frame_index).collect();
Self {
slices,
rts,
frame_indices,
}
}
@@ -113,21 +109,18 @@ impl ExpandedRawFrameIndex {
}

#[instrument(name = "ExpandedRawFrameIndex::from_path_centroided")]
pub fn from_path_centroided(path: &str) -> Result<Self, DataReadingError> {
pub fn from_path_centroided(path: &str) -> Result<Self> {
let config = FrameProcessingConfig::default_centroided();
Self::from_path_base(path, config)
}

#[instrument(name = "ExpandedRawFrameIndex::from_path")]
pub fn from_path(path: &str) -> Result<Self, DataReadingError> {
pub fn from_path(path: &str) -> Result<Self> {
Self::from_path_base(path, FrameProcessingConfig::NotCentroided)
}

#[instrument(name = "ExpandedRawFrameIndex::from_path_base")]
pub fn from_path_base(
path: &str,
centroid_config: FrameProcessingConfig,
) -> Result<Self, DataReadingError> {
pub fn from_path_base(path: &str, centroid_config: FrameProcessingConfig) -> Result<Self> {
info!(
"Building ExpandedRawFrameIndex from path {} config {:?}",
path, centroid_config,
Expand Down Expand Up @@ -166,7 +159,6 @@ impl ExpandedRawFrameIndex {
bundled_ms1_frames: out_ms1_frames.expect("At least one ms1 frame should be present"),
bundled_frames: out_ms2_frames,
flat_quad_settings,
rt_converter: meta_converters.rt_converter,
mz_converter: meta_converters.mz_converter,
im_converter: meta_converters.im_converter,
adapter,
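
Finally, a hypothetical caller of the refactored index constructors. The crate name and the import path are assumptions (see the comment in the snippet), and the code is not part of the commit; it only shows that both constructors now return the crate-wide `Result`.

use timsquery::errors::Result;
// The import path below is a guess based on the file location
// src/models/indices/expanded_raw_index/model.rs; the type may be re-exported
// from a shorter path.
use timsquery::models::indices::expanded_raw_index::model::ExpandedRawFrameIndex;

// Hypothetical caller: reader failures and unsupported-data cases surface
// uniformly as TimsqueryError values.
fn load_index(path: &str, centroided: bool) -> Result<ExpandedRawFrameIndex> {
    if centroided {
        ExpandedRawFrameIndex::from_path_centroided(path)
    } else {
        ExpandedRawFrameIndex::from_path(path)
    }
}

fn main() {
    // Placeholder path, for illustration only.
    match load_index("some_run.d", true) {
        Ok(_) => println!("index built"),
        Err(e) => println!("failed to build index: {}", e),
    }
}
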
