From 3e0b742493da7d45bdb48542e9b18d98258580e9 Mon Sep 17 00:00:00 2001 From: GeorgWa Date: Tue, 10 Sep 2024 18:10:04 +0200 Subject: [PATCH 1/3] Update GUI for release --- alphadia/constants/default.yaml | 8 +- alphadia/libtransform.py | 2 +- gui/src/renderer/components/ParameterInput.js | 3 +- gui/workflows/PeptideCentric.v1.json | 85 ++++++++----------- 4 files changed, 42 insertions(+), 56 deletions(-) diff --git a/alphadia/constants/default.yaml b/alphadia/constants/default.yaml index 45c3f0ec..14917790 100644 --- a/alphadia/constants/default.yaml +++ b/alphadia/constants/default.yaml @@ -89,9 +89,9 @@ search: target_num_candidates: 2 # target ms1 tolerance in ppm - target_ms1_tolerance: 15 + target_ms1_tolerance: 5 # target ms2 tolerance in ppm - target_ms2_tolerance: 15 + target_ms2_tolerance: 10 # target ion mobility tolerance in 1/K_0 target_mobility_tolerance: 0.0 # default is to optimize automatically # target retention time tolerance in seconds if > 1, or a proportion of the total gradient length if < 1 @@ -143,10 +143,10 @@ search_initial: initial_ms2_tolerance: 30 # initial ion mobility tolerance in 1/K_0 - initial_mobility_tolerance: 0.08 + initial_mobility_tolerance: 0.1 # initial retention time tolerance in seconds if > 1, or a proportion of the total gradient length if < 1 - initial_rt_tolerance: 240 + initial_rt_tolerance: 0.5 selection_config: peak_len_rt: 10. diff --git a/alphadia/libtransform.py b/alphadia/libtransform.py index c35c496e..c10ceb1c 100644 --- a/alphadia/libtransform.py +++ b/alphadia/libtransform.py @@ -328,7 +328,7 @@ def forward(self, input: SpecLibBase) -> SpecLibBase: logging.info(f"Loading PeptDeep models of type {self.peptdeep_model_type}") model_mgr.load_installed_models(self.peptdeep_model_type) - if self.peptdeep_model_path is not None: + if self.peptdeep_model_path is not None and self.peptdeep_model_path != "": if not os.path.exists(self.peptdeep_model_path): raise ValueError( f"PeptDeep model checkpoint folder {self.peptdeep_model_path} does not exist" diff --git a/gui/src/renderer/components/ParameterInput.js b/gui/src/renderer/components/ParameterInput.js index 25376942..bb1a63a8 100644 --- a/gui/src/renderer/components/ParameterInput.js +++ b/gui/src/renderer/components/ParameterInput.js @@ -16,8 +16,7 @@ const SingleFolderSelection = ({parameter, onChange = () => {}}) => { console.log(err); }) } - - const folderName = parameter.replace(/^.*[\\\/]/, '') + const folderName = parameter ? parameter.replace(/^.*[\\\/]/, '') : '' return ( <> diff --git a/gui/workflows/PeptideCentric.v1.json b/gui/workflows/PeptideCentric.v1.json index 31bf8e47..f9b459be 100644 --- a/gui/workflows/PeptideCentric.v1.json +++ b/gui/workflows/PeptideCentric.v1.json @@ -35,13 +35,6 @@ "description": "Number of threads to use for parallel processing.", "type": "integer" }, - { - "id": "reuse_calibration", - "name": "Reuse Calibration", - "value": false, - "description": "AlphaDIA will save the calibration parameters in the project file. If this option is enabled, the calibration parameters will be reused for subsequent searches of the same file.", - "type": "boolean" - }, { "id": "reuse_quant", "name": "Reuse Ion Quantities", @@ -153,7 +146,7 @@ { "id": "max_var_mod_num", "name": "Maximum variable modifications", - "value": 1, + "value": 2, "description": "Variable modifications for in-silico digest. At the moment localisation is not supported. 
Semicolon separated list \n Example: Oxidation@M;Acetyl@ProteinN-term", "type": "integer" }, @@ -241,7 +234,7 @@ { "id": "peptdeep_model_path", "name": "PeptDeep Model Path", - "value": "", + "value": null, "description": "Select a custom PeptDeep model for library prediction. This can be a DDA or DIA trained model. Please make sure that you use the same instrument type and NCE for prediction as the model was trained on.", "type": "singleFolderSelection" }, @@ -265,6 +258,34 @@ "name": "Search", "hidden": false, "parameters": [ + { + "id": "target_ms1_tolerance", + "name": "MS1 Tolerance", + "value": 5, + "description": "MS1 tolerance in ppm. Search windows are optimized and calibrated during processing. The window is reduced until this tolerance is reached.", + "type": "float" + }, + { + "id": "target_ms2_tolerance", + "name": "MS2 Tolerance", + "value": 10, + "description": "MS2 tolerance in ppm. Search windows are optimized and calibrated during processing. The window is reduced until this tolerance is reached.", + "type": "float" + }, + { + "id": "target_mobility_tolerance", + "name": "Mobility Tolerance", + "value": 0.0, + "description": "Mobility tolerance in 1/K_0. Search windows are optimized and calibrated during processing. The window is reduced until this tolerance is reached. Set to enable automatic optimization.", + "type": "float" + }, + { + "id": "target_rt_tolerance", + "name": "RT Tolerance", + "value": 0.0, + "description": "Retention time tolerance in seconds if greater than 1 or as a proportion of the gradient length if less than 1. Search windows are optimized and calibrated during processing. The window is reduced until this tolerance is reached. Automatic optimization is enabled if set to 0.", + "type": "float" + }, { "id": "channel_filter", "name": "Channel Filter", @@ -293,34 +314,7 @@ "description": "For every precursor in the library a number of top scoring candidates will be extracted. This number is the maximum number of candidates that will be extracted per precursor.", "type": "integer" }, - { - "id": "target_ms1_tolerance", - "name": "MS1 Tolerance", - "value": 5, - "description": "MS1 tolerance in ppm. Search windows are optimized and calibrated during processing. The window is reduced until this tolerance is reached.", - "type": "float" - }, - { - "id": "target_ms2_tolerance", - "name": "MS2 Tolerance", - "value": 10, - "description": "MS2 tolerance in ppm. Search windows are optimized and calibrated during processing. The window is reduced until this tolerance is reached.", - "type": "float" - }, - { - "id": "target_mobility_tolerance", - "name": "Mobility Tolerance", - "value": 0.04, - "description": "Mobility tolerance in 1/K_0. Search windows are optimized and calibrated during processing. The window is reduced until this tolerance is reached.", - "type": "float" - }, - { - "id": "target_rt_tolerance", - "name": "RT Tolerance", - "value": 100, - "description": "Retention time tolerance in seconds if greater than 1 or as a proportion of the gradient length if less than 1. Search windows are optimized and calibrated during processing. The window is reduced until this tolerance is reached.", - "type": "float" - }, + { "id": "quant_window", "name": "Quant window", @@ -332,7 +326,7 @@ { "id": "quant_all", "name": "Use all MS2 observations", - "value": false, + "value": true, "description": "Use all MS2 observations for quantification. If disabled only the best scoring observation is used for quantification. 
Recommended for synchro-PASEF data.", "type": "boolean" @@ -374,13 +368,6 @@ "heuristic" ] }, - { - "id": "competetive_scoring", - "name": "Competetive Scoring", - "value": true, - "description": "If enabled, only the best scoring candidate per target decoy pair is retained.", - "type": "boolean" - }, { "id": "channel_wise_fdr", "name": "Channel wise FDR", @@ -405,7 +392,7 @@ { "id": "initial_num_candidates", "name": "Number of Candidates", - "value": 2, + "value": 1, "description": "Initial number of candidates to extract per precursor.", "type": "integer" }, @@ -426,14 +413,14 @@ { "id": "initial_mobility_tolerance", "name": "Mobility Tolerance", - "value": 0.08, + "value": 0.1, "description": "Initial mobility tolerance in 1/K_0.", "type": "float" }, { "id": "initial_rt_tolerance", "name": "RT Tolerance", - "value": 240, + "value": 0.5, "description": "Initial retention time tolerance in seconds if greater than 1 or as a proportion of the gradient length if less than 1.", "type": "float" } @@ -445,7 +432,7 @@ "hidden": true, "parameters": [ { - "id": "multiplexed_quant", + "id": "enabled", "name": "Enable Multiplexing", "value": false, "description": "Quantify and score identification across non-isobaric labled channels.", From e9346548598f43c7174d326b24b70a0344519bcf Mon Sep 17 00:00:00 2001 From: GeorgWa Date: Tue, 10 Sep 2024 18:17:53 +0200 Subject: [PATCH 2/3] fix #234 --- gui/src/main/modules/engine.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gui/src/main/modules/engine.js b/gui/src/main/modules/engine.js index c75c3259..4034e8cc 100644 --- a/gui/src/main/modules/engine.js +++ b/gui/src/main/modules/engine.js @@ -263,7 +263,7 @@ class CMDExecutionEngine extends BaseExecutionEngine { "--no-capture-output", "alphadia", "--config", - path.join(workflow.output_directory.path, "config.yaml") + `"${path.join(workflow.output_directory.path, "config.yaml")}"` ] , { env:{...process.env, PATH}, shell: true}); run.pid = run.process.pid @@ -423,7 +423,7 @@ class BundledExecutionEngine extends BaseExecutionEngine { // use binary location as cwd and binary name as command run.process = spawn(prefix + binaryName, ["--config", - path.join(workflow.output_directory.path, "config.yaml") + `"${path.join(workflow.output_directory.path, "config.yaml")}"` ], { env:{...process.env, PATH}, From 25dfebbdb5c0a901b379a1c1a08adef1f4e6177b Mon Sep 17 00:00:00 2001 From: GeorgWa Date: Tue, 10 Sep 2024 18:22:45 +0200 Subject: [PATCH 3/3] fix bug --- alphadia/libtransform.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/alphadia/libtransform.py b/alphadia/libtransform.py index c10ceb1c..2dbd5021 100644 --- a/alphadia/libtransform.py +++ b/alphadia/libtransform.py @@ -324,11 +324,11 @@ def forward(self, input: SpecLibBase) -> SpecLibBase: model_mgr = ModelManager(device=device) # will load other model than default generic - if self.peptdeep_model_type is not None: + if self.peptdeep_model_type: logging.info(f"Loading PeptDeep models of type {self.peptdeep_model_type}") model_mgr.load_installed_models(self.peptdeep_model_type) - if self.peptdeep_model_path is not None and self.peptdeep_model_path != "": + if self.peptdeep_model_path and self.peptdeep_model_path != "": if not os.path.exists(self.peptdeep_model_path): raise ValueError( f"PeptDeep model checkpoint folder {self.peptdeep_model_path} does not exist"
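
Note on patch 2 ("fix #234"): the quoting change in gui/src/main/modules/engine.js matters because both execution engines spawn alphadia with shell: true. Node then joins the command and its arguments into a single shell command line, so an output directory whose path contains spaces would split the unquoted --config argument into several tokens. A minimal sketch of that behavior, assuming a hypothetical output directory with a space in its name (not taken from the patch):

    const { spawn } = require('child_process');
    const path = require('path');

    // Hypothetical output directory containing a space, as a user might pick in the GUI.
    const outputDir = '/Users/demo/My Project';
    const configPath = path.join(outputDir, 'config.yaml');

    // With shell: true, the command and arguments are concatenated into one shell
    // command line, so an unquoted path would be word-split at the space and alphadia
    // would receive a truncated --config value. Wrapping the path in quotes keeps it a
    // single argument, which is what the patched engine.js does.
    const run = spawn('alphadia', ['--config', `"${configPath}"`], {
        env: { ...process.env },
        shell: true,
    });

    run.on('error', (err) => console.log(err));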