Skip to content

Commit

Permalink
review comments
Browse files Browse the repository at this point in the history
  • Loading branch information
odespard committed Aug 28, 2024
1 parent 96a1aeb commit 584b76c
Show file tree
Hide file tree
Showing 3 changed files with 35 additions and 23 deletions.
10 changes: 5 additions & 5 deletions alphadia/workflow/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import os

# alpha family imports
from alphabase.spectral_library.base import SpecLibBase
from alphabase.spectral_library.base import SpecLibBase, SpecLibFlat

# alphadia imports
from alphadia.data import alpharaw, bruker
Expand Down Expand Up @@ -139,22 +139,22 @@ def config(self) -> dict:
return self._config

@property
def calibration_manager(self) -> "manager.CalibrationManager":
    """Calibration manager for the workflow.

    Owns the RT, IM and MZ calibration and the calibration data.

    Returns
    -------
    manager.CalibrationManager
        The workflow's calibration manager instance.
    """
    # String annotation: lazy forward reference so the annotation is not
    # evaluated at class-definition time.
    return self._calibration_manager

@property
def optimization_manager(self) -> "manager.OptimizationManager":
    """Optimization manager for the workflow.

    Owns the optimization data.

    Returns
    -------
    manager.OptimizationManager
        The workflow's optimization manager instance.
    """
    # String annotation: lazy forward reference so the annotation is not
    # evaluated at class-definition time.
    return self._optimization_manager

@property
def timing_manager(self) -> "manager.TimingManager":
    """Timing manager for the workflow.

    Owns the timing data.  (Docstring fixed: it previously said
    "Optimization manager", a copy-paste error from the sibling property.)

    Returns
    -------
    manager.TimingManager
        The workflow's timing manager instance.
    """
    # String annotation: lazy forward reference so the annotation is not
    # evaluated at class-definition time.
    return self._timing_manager

@property
def spectral_library(self) -> "SpecLibFlat":
    """Spectral library for the workflow.

    Owns the spectral library data.

    Returns
    -------
    SpecLibFlat
        The workflow's flattened spectral library instance.
    """
    # String annotation: lazy forward reference so the annotation is not
    # evaluated at class-definition time (SpecLibFlat is imported from
    # alphabase.spectral_library.base at the top of the file).
    return self._spectral_library

Expand Down
6 changes: 0 additions & 6 deletions alphadia/workflow/optimization.py
Original file line number Diff line number Diff line change
Expand Up @@ -484,12 +484,6 @@ def __init__(
self.estimator_group_name = "precursor"
self.estimator_name = "rt"
self.feature_name = "precursor_proportion_detected"
self.maximal_decrease = workflow.config["optimization"]["rt_error"][
"maximal_decrease"
]
self.minimum_proportion_of_maximum = workflow.config["optimization"][
"rt_error"
]["minimum_proportion_of_maximum"]
super().__init__(initial_parameter, workflow, reporter)

def _get_feature_value(
Expand Down
42 changes: 30 additions & 12 deletions alphadia/workflow/peptidecentric.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,7 @@ def __init__(
instance_name,
config,
)
self.optlock = None

def load(
self,
Expand Down Expand Up @@ -419,7 +420,7 @@ def search_parameter_optimization(self):

log_string(f"Starting optimization step {current_step}.")

precursor_df = self.process_batch()
precursor_df = self._process_batch()

if not self.optlock.has_target_num_precursors:
self.optlock.update()
Expand All @@ -440,10 +441,10 @@ def search_parameter_optimization(self):

if not self.optlock.previously_calibrated: # Updates classifier but does not optimize the first time the target is reached.
# Optimization is more stable when done with calibrated values.
self.initiate_search_parameter_optimization()
self._initiate_search_parameter_optimization()
continue

self.step_all_optimizers(
self._step_all_optimizers(
optimizers, precursor_df_filtered, fragments_df_filtered
)

Expand Down Expand Up @@ -472,7 +473,8 @@ def search_parameter_optimization(self):

self.save_managers()

def process_batch(self):
def _process_batch(self):
"""Extracts precursors and fragments from the spectral library, performs FDR correction and logs the precursor dataframe."""
self.reporter.log_string(
f"=== Extracting elution groups {self.optlock.start_idx} to {self.optlock.stop_idx} ===",
verbosity="progress",
Expand Down Expand Up @@ -505,7 +507,8 @@ def process_batch(self):

return precursor_df

def initiate_search_parameter_optimization(self):
def _initiate_search_parameter_optimization(self):
"""Saves the classifier version just before search parameter optimization begins and updates the optimization lock to show that calibration has been performed."""
self.optlock.previously_calibrated = True
self.optimization_manager.fit(
{"classifier_version": self.fdr_manager.current_version}
Expand All @@ -515,25 +518,40 @@ def initiate_search_parameter_optimization(self):
verbosity="progress",
)

def step_all_optimizers(
self, optimizers, precursor_df_filtered, fragments_df_filtered
def _step_all_optimizers(
self,
optimizers: list,
precursor_df_filtered: pd.DataFrame,
fragments_df_filtered: pd.DataFrame,
):
"""All optimizers currently in use are stepped and their current state is logged.
Parameters
----------
optimizers : list
List of optimizers to be stepped.
precursor_df_filtered : pd.DataFrame
Filtered precursor dataframe (see filter_dfs).
fragments_df_filtered : pd.DataFrame
Filtered fragment dataframe (see filter_dfs).
"""
self.reporter.log_string(
"=== checking if optimization conditions were reached ===",
)

for optimizer in optimizers:
optimizer.step(precursor_df_filtered, fragments_df_filtered)
self.reporter.log_string(
f"=== Optimization of {optimizer.parameter_name} has been performed {optimizer.num_prev_optimizations} times; minimum number is {self.config['calibration']['min_steps']} ===",
verbosity="progress",
)

self.reporter.log_string(
"==============================================",
)

self.reporter.log_string(
f"=== Optimization has been performed {optimizer.num_prev_optimizations} times; minimum number is {self.config['calibration']['min_steps']} ===",
verbosity="progress",
)

def filter_dfs(self, precursor_df, fragments_df):
"""Filters precursor and fragment dataframes to extract the most reliable examples for calibration.
Expand Down

0 comments on commit 584b76c

Please sign in to comment.