From 5d39fe4f5899c4aa31ad83998165c362f383c375 Mon Sep 17 00:00:00 2001 From: mschwoerer <82171591+mschwoer@users.noreply.github.com> Date: Thu, 12 Sep 2024 16:27:28 +0200 Subject: [PATCH 01/14] remove old release workflow --- .github/workflows/legacy_create_release.yml | 201 ------------------ .github/workflows/legacy_publish_on_pypi.yml | 126 ----------- misc/.bumpversion.cfg | 7 - release/macos/build_backend_macos.sh | 10 - release/macos/build_pkg_macos.sh | 78 ------- .../windows/alphadia_innoinstaller_old.iss | 54 ----- release/windows/build_backend.ps1 | 12 -- release/windows/build_gui.ps1 | 14 -- release/windows/build_installer.ps1 | 4 - 9 files changed, 506 deletions(-) delete mode 100644 .github/workflows/legacy_create_release.yml delete mode 100644 .github/workflows/legacy_publish_on_pypi.yml delete mode 100755 release/macos/build_backend_macos.sh delete mode 100755 release/macos/build_pkg_macos.sh delete mode 100644 release/windows/alphadia_innoinstaller_old.iss delete mode 100644 release/windows/build_backend.ps1 delete mode 100644 release/windows/build_gui.ps1 delete mode 100644 release/windows/build_installer.ps1 diff --git a/.github/workflows/legacy_create_release.yml b/.github/workflows/legacy_create_release.yml deleted file mode 100644 index 161d9898..00000000 --- a/.github/workflows/legacy_create_release.yml +++ /dev/null @@ -1,201 +0,0 @@ -# TODO delete once the new release workflow has run once -on: - workflow_dispatch: - inputs: - commit_to_release: - description: 'Enter commit hash to release (example: ef4037cb571f99cb4919b520fde7174972aae473)' - required: true - tag_to_release: - description: 'Enter tag to release (example: v1.5.5)' - required: true - - -name: LEGACY Create Draft Release - -jobs: - Get_New_Version: - runs-on: ubuntu-latest - outputs: - new_version: ${{ steps.check_release_tag.outputs.new_version }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ inputs.commit_to_release }} - - - name: Check release tag - id: check_release_tag - shell: bash -le {0} - run: | - CURRENT_VERSION=$(./misc/get_current_version.sh) - if [ "v${CURRENT_VERSION}" != "${{ inputs.tag_to_release }}" ]; then - echo Code version "v${CURRENT_VERSION}" does not match the tag to release ${{ inputs.tag_to_release }} - exit 1 - fi - echo "new_version=$CURRENT_VERSION" >> $GITHUB_OUTPUT - - - uses: mukunku/tag-exists-action@v1.6.0 - id: check-tag - with: - tag: ${{ inputs.tag_to_release }} - - - name: Check if tag already exists - run: | - echo "Tag already exists!" - exit 1 - if: steps.check-tag.outputs.exists == 'true' - - - Create_Draft_Release: - runs-on: ubuntu-latest - needs: Get_New_Version - outputs: - upload_url: ${{ steps.draft_release.outputs.upload_url }} - steps: - - name: Draft Release - id: draft_release - # TODO this action is deprecated, replace with https://github.com/ncipollo/release-action - # cf. 
https://github.com/actions/create-release/issues/119#issuecomment-783010321 - uses: actions/create-release@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - tag_name: ${{ inputs.tag_to_release }} - release_name: ${{ inputs.tag_to_release }} - draft: true - prerelease: false - - Create_MacOS_Installer: - needs: [Create_Draft_Release, Get_New_Version] - env: - ARCH: x64 - EAGER_IMPORT: true - runs-on: macos-latest-xlarge - steps: - - name : Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ inputs.commit_to_release }} - - # Build backend - - name: Install conda - uses: conda-incubator/setup-miniconda@v3 - with: - miniconda-version: "latest" - auto-update-conda: true - activate-environment: alpha - python-version: "3.11" - - - name: Check arm64 - shell: bash -el {0} - run: | - python -c "import platform; print(platform.machine())" - - - name: Build backend - shell: bash -el {0} - run: | - release/macos/build_backend_macos.sh - - - name: Test backend - shell: bash -el {0} - run: | - dist/alphadia/alphadia --version - - # Build GUI - - name: Setup Node.js - uses: actions/setup-node@v4 - - - name: Build GUI - run: | - release/macos/build_gui_macos.sh - - # combine backend and GUI - - name: Build package - shell: bash -el {0} - run: | - release/macos/build_pkg_macos.sh - - - name: List output files - run: | - ls dist - - # Upload the package - - name: Upload a Release Asset - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ needs.Create_Draft_Release.outputs.upload_url }} - asset_path: dist/alphadia-${{ needs.Get_New_Version.outputs.new_version }}-darwin-${{ env.ARCH }}.pkg - asset_name: alphadia-${{ needs.Get_New_Version.outputs.new_version }}-darwin-${{ env.ARCH }}.pkg - asset_content_type: application/zip - - Create_Windows_Installer: - needs: [Create_Draft_Release, Get_New_Version] - env: - ARCH: x64 - runs-on: windows-latest - steps: - - name : Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ inputs.commit_to_release }} - - # Build backend - - name: Install conda - uses: conda-incubator/setup-miniconda@v3 - with: - miniconda-version: "latest" - auto-update-conda: true - activate-environment: alpha - python-version: "3.11" - - - name: Build Backend - shell: powershell - run: | - release/windows/build_backend.ps1 - - - name: Test Backend - shell: powershell - run: | - dist\alphadia\alphadia.exe --version - - # Build GUI - - name: Setup Node.js - uses: actions/setup-node@v4 - - - name: Build GUI - shell: powershell - run: | - release/windows/build_gui.ps1 - - # combine backend and GUI - - name: Check if Innosetup is installed - shell: powershell - run: | - if (-not (Test-Path "C:\Program Files (x86)\Inno Setup 6\ISCC.exe")) { - Write-Host "Inno Setup is not installed" - exit 1 - } - else { - Write-Host "Inno Setup is installed" - } - - - name: Build Installer - shell: powershell - run: | - release/windows/build_installer.ps1 - - - name: List output files - run: | - ls dist - - - name: Upload a Release Asset - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ needs.Create_Draft_Release.outputs.upload_url }} - asset_path: dist/alphadia-${{ needs.Get_New_Version.outputs.new_version }}-win-${{ env.ARCH }}.exe - asset_name: alphadia-${{ needs.Get_New_Version.outputs.new_version }}-win-${{ env.ARCH }}.exe - asset_content_type: application/zip diff --git a/.github/workflows/legacy_publish_on_pypi.yml 
b/.github/workflows/legacy_publish_on_pypi.yml deleted file mode 100644 index 2efb2470..00000000 --- a/.github/workflows/legacy_publish_on_pypi.yml +++ /dev/null @@ -1,126 +0,0 @@ -# TODO delete once the new release workflow has run once -on: - workflow_dispatch: - inputs: - tag_to_release: - description: 'Enter tag to release (example: v1.5.5)' - required: true - -name: LEGACY Publish on PyPi - -env: - PYTHON_VERSION: "3.11" - -jobs: - Create_PyPi_Release: - runs-on: ubuntu-latest - outputs: - new_version: ${{ steps.get_current_version.outputs.new_version }} - steps: - - uses: actions/checkout@v4 - with: - ref: ${{ inputs.tag_to_release }} - - uses: conda-incubator/setup-miniconda@v3 - with: - miniconda-version: "latest" - auto-update-conda: true - python-version: ${{ env.PYTHON_VERSION }} - - name: Conda info - shell: bash -le {0} - run: conda info - - name: Get current version - id: get_current_version - shell: bash -l {0} - run: | - CURRENT_VERSION=$(./misc/get_current_version.sh) - echo "new_version=$CURRENT_VERSION" >> $GITHUB_OUTPUT - - uses: actions/cache@v4 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements*.txt') }} - - name: Prepare distribution - shell: bash -le {0} - run: | - conda create -n alphadia_build python=${{ env.PYTHON_VERSION }} -y - conda activate alphadia_build - python -m pip install --upgrade pip - pip install build twine - rm -rf dist - rm -rf build - python -m build - twine check dist/* - conda deactivate - conda env remove --name alphadia_build -y - conda clean --all -y - - name: Publish distribution to Test-PyPI - uses: pypa/gh-action-pypi-publish@release/v1 - with: - repository-url: https://test.pypi.org/legacy/ - user: __token__ - password: ${{ secrets.TEST_PYPI_API_TOKEN }} - - name: Test Test-PyPI loose installation - shell: bash -le {0} - run: | - conda create -n pip_loose_test python=${{ env.PYTHON_VERSION }} -y - conda activate pip_loose_test - pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple "alphadia==${{ steps.get_current_version.outputs.new_version }}" - alphadia -v - conda deactivate - conda env remove --name pip_stable_test -y - conda clean --all -y - - name: Test Test-PyPI stable installation - shell: bash -le {0} - run: | - conda create -n pip_stable_test python=${{ env.PYTHON_VERSION }} -y - conda activate pip_stable_test - pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple "alphadia[stable]==${{ steps.get_current_version.outputs.new_version }}" - alphadia -v - conda deactivate - conda env remove --name pip_stable_test -y - conda clean --all -y - - name: Publish distribution to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 - with: - user: __token__ - password: ${{ secrets.PYPI_API_TOKEN }} - Test_PyPi_Release: - name: Test_PyPi_version_on_${{ matrix.os }} - runs-on: ${{ matrix.os }} - needs: Create_PyPi_Release - strategy: - matrix: - os: [ubuntu-latest, macOS-latest, windows-latest] - steps: - - uses: actions/checkout@v4 - - uses: conda-incubator/setup-miniconda@v3 - with: - miniconda-version: "latest" - auto-update-conda: true - python-version: ${{ env.PYTHON_VERSION }} - - name: Conda info - shell: bash -le {0} - run: conda info - - uses: actions/cache@v4 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements*.txt') }} - - name: Test PyPI stable installation - shell: bash -le {0} - run: | - conda create -n pip_stable python=${{ env.PYTHON_VERSION }} -y - conda activate 
pip_stable - pip install "alphadia[stable]==${{ needs.Create_PyPi_Release.outputs.new_version }}" - alphadia -v - conda deactivate - conda env remove --name pip_stable -y - conda clean --all -y - - name: Test PyPI loose installation - shell: bash -le {0} - run: | - conda create -n pip_loose python=${{ env.PYTHON_VERSION }} -y - conda activate pip_loose - pip install alphadia==${{ needs.Create_PyPi_Release.outputs.new_version }} - alphadia -v - conda deactivate - conda env remove --name pip_loose -y - conda clean --all -y diff --git a/misc/.bumpversion.cfg b/misc/.bumpversion.cfg index f51e7b54..f9a7dadd 100644 --- a/misc/.bumpversion.cfg +++ b/misc/.bumpversion.cfg @@ -29,18 +29,11 @@ serialize = [bumpversion:file:../release/macos/build_package_macos.sh] -[bumpversion:file:../release/macos/build_backend_macos.sh] - -[bumpversion:file:../release/macos/build_pkg_macos.sh] - [bumpversion:file:../release/macos/distribution.xml] [bumpversion:file:../release/windows/alphadia_innoinstaller.iss] -[bumpversion:file:../release/windows/alphadia_innoinstaller_old.iss] - [bumpversion:file:../release/windows/build_installer_windows.ps1] -[bumpversion:file:../release/windows/build_backend.ps1] search = {current_version} replace = {new_version} diff --git a/release/macos/build_backend_macos.sh b/release/macos/build_backend_macos.sh deleted file mode 100755 index ea713320..00000000 --- a/release/macos/build_backend_macos.sh +++ /dev/null @@ -1,10 +0,0 @@ -# TODO remove with old release workflow - -# navigate to the root directory - -python -m build -pip install "dist/alphadia-1.8.0-py3-none-any.whl[stable]" - -# Creating the stand-alone pyinstaller folder -pip install pyinstaller -pyinstaller release/pyinstaller/alphadia.spec -y diff --git a/release/macos/build_pkg_macos.sh b/release/macos/build_pkg_macos.sh deleted file mode 100755 index 1f2a7bd6..00000000 --- a/release/macos/build_pkg_macos.sh +++ /dev/null @@ -1,78 +0,0 @@ -#!/bin/bash -# TODO remove with old release workflow - -# Set up package name and version -PACKAGE_NAME="alphadia" -PACKAGE_VERSION="1.8.0" - -ARCH=$(uname -m) -if [ "$ARCH" == "x86_64" ]; then - ARCH="x64" -fi -echo "ARCH=${ARCH}" >> $GITHUB_ENV - -KERNEL=$(uname -s | tr '[:upper:]' '[:lower:]') - -BUILD_NAME="${PACKAGE_NAME}-${PACKAGE_VERSION}-${KERNEL}-${ARCH}" - -PKG_FOLDER="dist/alphadia.app" - -# Cleanup the package folder -echo "Cleaning up the package folder" - -rm -rf $PKG_FOLDER - -# === Prepare structure === -echo "Preparing package structure" - -mkdir -p $PKG_FOLDER -mkdir -p $PKG_FOLDER/Contents/Resources -mkdir -p $PKG_FOLDER/Contents/MacOS -mkdir -p $PKG_FOLDER/Contents/Frameworks - - - -# === Backend === -echo "Copying backend" - -BACKEND_BUILD="dist/alphadia" - -# Check if the backend build exists, otherwise exit with an error -if [ ! -d "$BACKEND_BUILD" ]; then - echo "Backend build not found at $BACKEND_BUILD" - exit 1 -fi - -# Copy the backend -cp -a ${BACKEND_BUILD}/. $PKG_FOLDER/Contents/Frameworks/ - - -# === GUI === -echo "Copying GUI" - -ls ./gui/out - -# Set the path to the GUI build -GUI_BUILD="./gui/out/alphadia-gui-${KERNEL}-${ARCH}" - -# Check if the GUI build exists, otherwise exit with an error -if [ ! -d "$GUI_BUILD" ]; then - echo "GUI build not found at $GUI_BUILD" - exit 1 -fi - -# Copy the electron forge build -cp -a ${GUI_BUILD}/. 
$PKG_FOLDER/Contents/Frameworks/ - -# === Resources === -echo "Copying resources" -cp release/logos/alphadia.icns $PKG_FOLDER/Contents/Resources/ -cp release/logos/alphadia.png $PKG_FOLDER/Contents/Resources/ -cp release/macos/alphadia $PKG_FOLDER/Contents/MacOS/ - -cp release/macos/Info.plist $PKG_FOLDER/Contents/ - -#change permissions for entry script -chmod +x $PKG_FOLDER/Contents/MacOS/alphadia - -pkgbuild --root $PKG_FOLDER --identifier de.mpg.biochem.alphadia.app --version $PACKAGE_VERSION --install-location /Applications/alphadia.app --scripts release/macos/scripts dist/$BUILD_NAME.pkg --nopayload diff --git a/release/windows/alphadia_innoinstaller_old.iss b/release/windows/alphadia_innoinstaller_old.iss deleted file mode 100644 index 58f0ef86..00000000 --- a/release/windows/alphadia_innoinstaller_old.iss +++ /dev/null @@ -1,54 +0,0 @@ -; Script generated by the Inno Setup Script Wizard. -; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES! -; # TODO remove with old release workflow - -#define MyAppName "AlphaDIA" -#define MyAppVersion "1.8.0" -#define MyAppPublisher "Max Planck Institute of Biochemistry, Mann Labs" -#define MyAppURL "https://github.com/MannLabs/alphadia" -#define MyAppExeName "alphadia-gui.exe" - -[Setup] -; NOTE: The value of AppId uniquely identifies this application. Do not use the same AppId value in installers for other applications. -; (To generate a new GUID, click Tools | Generate GUID inside the IDE.) -AppId={{alphadia_Mann_Labs_MPI} -AppName={#MyAppName} -AppVersion={#MyAppVersion} -;AppVerName={#MyAppName} {#MyAppVersion} -AppPublisher={#MyAppPublisher} -AppPublisherURL={#MyAppURL} -AppSupportURL={#MyAppURL} -AppUpdatesURL={#MyAppURL} -DefaultDirName={autopf}\{#MyAppName} -DisableProgramGroupPage=yes -LicenseFile=..\..\LICENSE.txt -; Uncomment the following line to run in non administrative install mode (install for current user only.) 
-PrivilegesRequired=lowest -PrivilegesRequiredOverridesAllowed=dialog -OutputDir=..\..\dist -; example for BUILD_NAME: alphadia-1.9.0-win-x64 -OutputBaseFilename={#GetEnv('BUILD_NAME')} -SetupIconFile=..\logos\alphadia.ico -Compression=lzma -SolidCompression=yes -WizardStyle=modern - -[Languages] -Name: "english"; MessagesFile: "compiler:Default.isl" - -[Tasks] -Name: "desktopicon"; Description: "{cm:CreateDesktopIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked - -[Files] -Source: "..\..\gui\out\alphadia-gui-win32-x64\{#MyAppExeName}"; DestDir: "{app}"; Flags: ignoreversion -Source: "..\..\gui\out\alphadia-gui-win32-x64\*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs createallsubdirs -Source: "..\..\dist\alphadia\*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs createallsubdirs - -; NOTE: Don't use "Flags: ignoreversion" on any shared system files - -[Icons] -Name: "{autoprograms}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}" -Name: "{autodesktop}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; Tasks: desktopicon - -[Run] -Filename: "{app}\{#MyAppExeName}"; Description: "{cm:LaunchProgram,{#StringChange(MyAppName, '&', '&&')}}"; Flags: nowait postinstall skipifsilent diff --git a/release/windows/build_backend.ps1 b/release/windows/build_backend.ps1 deleted file mode 100644 index 82543623..00000000 --- a/release/windows/build_backend.ps1 +++ /dev/null @@ -1,12 +0,0 @@ -# TODO remove with old release workflow - -Remove-Item -Recurse -Force -ErrorAction SilentlyContinue ./build -Remove-Item -Recurse -Force -ErrorAction SilentlyContinue ./dist - -pip install build -python -m build -pip install "dist/alphadia-1.8.0-py3-none-any.whl[stable]" - -# Creating the stand-alone pyinstaller folder -pip install pyinstaller tbb -pyinstaller release/pyinstaller/alphadia.spec -y diff --git a/release/windows/build_gui.ps1 b/release/windows/build_gui.ps1 deleted file mode 100644 index 8503d72f..00000000 --- a/release/windows/build_gui.ps1 +++ /dev/null @@ -1,14 +0,0 @@ -# TODO remove with old release workflow - -# Powershell script to build the GUI for Windows - -cd gui -# delete old build using powershell -Remove-Item -Recurse -Force -ErrorAction SilentlyContinue ./out -Remove-Item -Recurse -Force -ErrorAction SilentlyContinue ./dist - - -npm install -npm run make - -cd .. 
diff --git a/release/windows/build_installer.ps1 b/release/windows/build_installer.ps1 deleted file mode 100644 index feb8e965..00000000 --- a/release/windows/build_installer.ps1 +++ /dev/null @@ -1,4 +0,0 @@ -# TODO remove with old release workflow - - -& "C:\Program Files (x86)\Inno Setup 6\ISCC.exe" .\release\windows\alphadia_innoinstaller_old.iss From 756483a2f2faa1ae24ab2c8280e688c1c50569da Mon Sep 17 00:00:00 2001 From: GeorgWa Date: Fri, 13 Sep 2024 13:34:24 +0200 Subject: [PATCH 02/14] Bump alphaX dependencies --- requirements/requirements.txt | 4 ++-- requirements/requirements_loose.txt | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 6242facf..31f893c9 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -5,8 +5,8 @@ numba==0.59.1 argparse==1.4.0 alpharaw==0.4.5 alphatims==1.0.8 -alphabase==1.2.5 -peptdeep==1.2.1 +alphabase==1.4.0 # test: tolerate_version +peptdeep==1.3.0 # test: tolerate_version progressbar==2.5 neptune==1.10.4 seaborn==0.13.2 diff --git a/requirements/requirements_loose.txt b/requirements/requirements_loose.txt index e8d16f58..68aa94bf 100644 --- a/requirements/requirements_loose.txt +++ b/requirements/requirements_loose.txt @@ -4,8 +4,8 @@ numba argparse alpharaw>=0.3.1 # test: tolerate_version alphatims -alphabase>=1.2.5 # test: tolerate_version -peptdeep>=1.2.1 # test: tolerate_version +alphabase>=1.4.0 # test: tolerate_version +peptdeep>=1.3.0 # test: tolerate_version progressbar neptune seaborn From 08f0fbc07f25f85bbf8f953268ab375136df4f23 Mon Sep 17 00:00:00 2001 From: GeorgWa Date: Sat, 14 Sep 2024 17:36:45 +0200 Subject: [PATCH 03/14] fix #347 --- alphadia/workflow/optimization.py | 84 ++++++++++++++++------------- alphadia/workflow/peptidecentric.py | 3 +- tests/unit_tests/test_workflow.py | 43 ++++++++++++--- 3 files changed, 85 insertions(+), 45 deletions(-) diff --git a/alphadia/workflow/optimization.py b/alphadia/workflow/optimization.py index 4fc61188..0b742274 100644 --- a/alphadia/workflow/optimization.py +++ b/alphadia/workflow/optimization.py @@ -757,7 +757,9 @@ def __init__(self, library: SpecLibFlat, config: dict): rng = np.random.default_rng(seed=772) rng.shuffle(self.elution_group_order) - self.target_count = self._config["calibration"]["optimization_lock_target"] + self._precursor_target_count = self._config["calibration"][ + "optimization_lock_target" + ] self.batch_idx = 0 self.set_batch_plan() @@ -768,6 +770,27 @@ def __init__(self, library: SpecLibFlat, config: dict): self.feature_dfs = [] self.fragment_dfs = [] + @property + def features_df(self) -> pd.DataFrame: + return pd.concat(self.feature_dfs) + + @property + def fragments_df(self) -> pd.DataFrame: + return pd.concat(self.fragment_dfs) + + @property + def start_idx(self) -> int: + if self.has_target_num_precursors: + return 0 + elif self.batch_idx >= len(self.batch_plan): + raise NoOptimizationLockTargetError() # This should never be triggered since introduction of the BaseOptimizer.proceed_with_insufficient_precursors method and associated code, and could be removed. 
+ else: + return self.batch_plan[self.batch_idx][0] + + @property + def stop_idx(self) -> int: + return self.batch_plan[self.batch_idx][1] + def _get_exponential_batches(self, step): """Get the number of batches for a given step This plan has the shape: @@ -777,7 +800,7 @@ def _get_exponential_batches(self, step): def set_batch_plan(self): """Gets an exponential batch plan based on the batch_size value in the config.""" - n_eg = self._library._precursor_df["elution_group_idx"].nunique() + n_eg = len(self.elution_group_order) plan = [] @@ -794,6 +817,9 @@ def set_batch_plan(self): self.batch_plan = plan + def batches_remaining(self): + return self.batch_idx + 1 > len(self.batch_plan) + def update_with_extraction( self, feature_df: pd.DataFrame, fragment_df: pd.DataFrame ): @@ -822,9 +848,10 @@ def update_with_fdr(self, precursor_df: pd.DataFrame): The precursor dataframe for the current batch of the optimization lock (from workflow.perform_fdr). """ - self.count = len(precursor_df[precursor_df["qval"] < 0.01]) - - self.has_target_num_precursors = self.count >= self.target_count + self._precursor_at_fdr_count = np.sum(precursor_df["qval"] <= 0.01) + self.has_target_num_precursors = ( + self._precursor_at_fdr_count >= self._precursor_target_count + ) def update_with_calibration(self, calibration_manager): """Updates the batch library with the current calibrated values using the calibration manager. @@ -840,7 +867,10 @@ def update_with_calibration(self, calibration_manager): "precursor", ) - calibration_manager.predict(self.batch_library._fragment_df, "fragment") + calibration_manager.predict( + self.batch_library._fragment_df, + "fragment", + ) def increase_batch_idx(self): """If the optimization lock does not contain enough precursors at 1% FDR, the optimization lock proceeds to include the next step in the batch plan in the library attribute. @@ -856,29 +886,28 @@ def decrease_batch_idx(self): batch_plan_diff = np.array( [ - stop_at_given_idx - self.stop_idx * self.target_count / self.count + stop_at_given_idx + - self.stop_idx + * self._precursor_target_count + / self._precursor_at_fdr_count for _, stop_at_given_idx in self.batch_plan ] ) # Calculate the difference between the number of precursors expected at the given idx and the target number of precursors for each idx in the batch plan. - smallest_value = np.min( - batch_plan_diff[batch_plan_diff > 0] - ) # Take the smallest positive difference (i.e. the smallest idx that is expected to yield more than the target number of precursors). - self.batch_idx = np.where(batch_plan_diff == smallest_value)[0][ - 0 - ] # Set the batch idx to the index of the smallest positive difference. + # get index of smallest value >= 0 + self.batch_idx = np.where(batch_plan_diff >= 0)[0][0] def update(self): """Updates the library to use for the next round of optimization, either adjusting it upwards or downwards depending on whether the target has been reached. 
If the target has been reached, the feature and fragment dataframes are reset """ - if not self.has_target_num_precursors: - self.increase_batch_idx() - - else: + if self.has_target_num_precursors: self.decrease_batch_idx() self.feature_dfs = [] self.fragment_dfs = [] + else: + self.increase_batch_idx() + eg_idxes = self.elution_group_order[self.start_idx : self.stop_idx] self.set_batch_dfs(eg_idxes) @@ -919,24 +948,3 @@ def set_batch_dfs(self, eg_idxes: None | np.ndarray = None): frag_stop_col="flat_frag_stop_idx", ) ) - - @property - def features_df(self) -> pd.DataFrame: - return pd.concat(self.feature_dfs) - - @property - def fragments_df(self) -> pd.DataFrame: - return pd.concat(self.fragment_dfs) - - @property - def start_idx(self) -> int: - if self.has_target_num_precursors: - return 0 - elif self.batch_idx >= len(self.batch_plan): - raise NoOptimizationLockTargetError() # This should never be triggered since introduction of the BaseOptimizer.proceed_with_insufficient_precursors method and associated code, and could be removed. - else: - return self.batch_plan[self.batch_idx][0] - - @property - def stop_idx(self) -> int: - return self.batch_plan[self.batch_idx][1] diff --git a/alphadia/workflow/peptidecentric.py b/alphadia/workflow/peptidecentric.py index 31b8ad1f..86607d14 100644 --- a/alphadia/workflow/peptidecentric.py +++ b/alphadia/workflow/peptidecentric.py @@ -433,9 +433,10 @@ def search_parameter_optimization(self): precursor_df = self._process_batch() if not self.optlock.has_target_num_precursors: - if self.optlock.batch_idx + 1 >= len(self.optlock.batch_plan): + if not self.optlock.batches_remaining(): insufficient_precursors_to_optimize = True break + self.optlock.update() if self.optlock.previously_calibrated: diff --git a/tests/unit_tests/test_workflow.py b/tests/unit_tests/test_workflow.py index 35d7bf2f..cb7e4e64 100644 --- a/tests/unit_tests/test_workflow.py +++ b/tests/unit_tests/test_workflow.py @@ -791,9 +791,9 @@ def test_targeted_mobility_optimizer(): assert workflow.optimization_manager.mobility_error == optimizer.target_parameter -def create_test_library(): +def create_test_library(count=100000): lib = SpecLibFlat() - precursor_idx = np.arange(100000) + precursor_idx = np.arange(count) elution_group_idx = np.concatenate( [np.full(2, i, dtype=int) for i in np.arange(len(precursor_idx) / 2)] ) @@ -843,6 +843,37 @@ def create_test_library_for_indexing(): return lib +def test_optlock_spot_on_target(): + TEST_OPTLOCK_CONFIG = { + "calibration": { + "batch_size": 2000, + "optimization_lock_target": 200, + } + } + + # edge case where the number of precursors is exactly the target + library = create_test_library(2000) + optlock = optimization.OptimizationLock(library, TEST_OPTLOCK_CONFIG) + + assert optlock.start_idx == optlock.batch_plan[0][0] + + feature_df = pd.DataFrame({"elution_group_idx": np.arange(0, 1000)}) + fragment_df = pd.DataFrame({"elution_group_idx": np.arange(0, 10000)}) + + optlock.update_with_extraction(feature_df, fragment_df) + + assert optlock.total_elution_groups == 1000 + precursor_df = pd.DataFrame( + {"qval": np.concatenate([np.full(200, 0.005), np.full(800, 0.05)])} + ) + optlock.update_with_fdr(precursor_df) + optlock.update() + + assert optlock.start_idx == 0 + assert optlock.stop_idx == optlock.batch_plan[0][1] + assert optlock.has_target_num_precursors + + TEST_OPTLOCK_CONFIG = { "calibration": { "batch_size": 8000, @@ -868,8 +899,8 @@ def test_optlock(): ) optlock.update_with_fdr(precursor_df) - assert optlock.has_target_num_precursors 
is False - assert optlock.previously_calibrated is False + assert not optlock.has_target_num_precursors + assert not optlock.previously_calibrated optlock.update() assert optlock.start_idx == optlock.batch_plan[1][0] @@ -887,8 +918,8 @@ def test_optlock(): optlock.update_with_fdr(precursor_df) - assert optlock.has_target_num_precursors is True - assert optlock.previously_calibrated is False + assert optlock.has_target_num_precursors + assert not optlock.previously_calibrated optlock.update() From 896b0b20ff5fe399621441263a95d88c51233343 Mon Sep 17 00:00:00 2001 From: GeorgWa Date: Sat, 14 Sep 2024 17:39:22 +0200 Subject: [PATCH 04/14] revert fdr rule --- alphadia/workflow/optimization.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/alphadia/workflow/optimization.py b/alphadia/workflow/optimization.py index 0b742274..60c46080 100644 --- a/alphadia/workflow/optimization.py +++ b/alphadia/workflow/optimization.py @@ -848,7 +848,7 @@ def update_with_fdr(self, precursor_df: pd.DataFrame): The precursor dataframe for the current batch of the optimization lock (from workflow.perform_fdr). """ - self._precursor_at_fdr_count = np.sum(precursor_df["qval"] <= 0.01) + self._precursor_at_fdr_count = np.sum(precursor_df["qval"] < 0.01) self.has_target_num_precursors = ( self._precursor_at_fdr_count >= self._precursor_target_count ) From 7fa7f2800348da5efaa625de657c57d6e1dd5d25 Mon Sep 17 00:00:00 2001 From: GeorgWa Date: Sun, 15 Sep 2024 11:32:20 +0200 Subject: [PATCH 05/14] bool bug --- alphadia/workflow/optimization.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/alphadia/workflow/optimization.py b/alphadia/workflow/optimization.py index 60c46080..ffc9dd4f 100644 --- a/alphadia/workflow/optimization.py +++ b/alphadia/workflow/optimization.py @@ -818,7 +818,7 @@ def set_batch_plan(self): self.batch_plan = plan def batches_remaining(self): - return self.batch_idx + 1 > len(self.batch_plan) + return self.batch_idx + 1 < len(self.batch_plan) def update_with_extraction( self, feature_df: pd.DataFrame, fragment_df: pd.DataFrame From 7a1e03eeb6084a07ac85859d3f61320c8e78bdaa Mon Sep 17 00:00:00 2001 From: GeorgWa Date: Sun, 15 Sep 2024 11:34:56 +0200 Subject: [PATCH 06/14] add decoy filter --- alphadia/workflow/optimization.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/alphadia/workflow/optimization.py b/alphadia/workflow/optimization.py index ffc9dd4f..b579e677 100644 --- a/alphadia/workflow/optimization.py +++ b/alphadia/workflow/optimization.py @@ -848,7 +848,9 @@ def update_with_fdr(self, precursor_df: pd.DataFrame): The precursor dataframe for the current batch of the optimization lock (from workflow.perform_fdr). 
""" - self._precursor_at_fdr_count = np.sum(precursor_df["qval"] < 0.01) + self._precursor_at_fdr_count = np.sum( + (precursor_df["qval"] < 0.01) & (precursor_df["decoy"] == 0) + ) self.has_target_num_precursors = ( self._precursor_at_fdr_count >= self._precursor_target_count ) From bdbc5e1fd36553fa0e4c1001636adc93ae987c40 Mon Sep 17 00:00:00 2001 From: GeorgWa Date: Mon, 16 Sep 2024 12:18:54 +0200 Subject: [PATCH 07/14] perform correctly on empty searches --- alphadia/workflow/optimization.py | 57 ++++++++++++++++++++++------- alphadia/workflow/peptidecentric.py | 5 ++- 2 files changed, 48 insertions(+), 14 deletions(-) diff --git a/alphadia/workflow/optimization.py b/alphadia/workflow/optimization.py index b579e677..d4e6d170 100644 --- a/alphadia/workflow/optimization.py +++ b/alphadia/workflow/optimization.py @@ -71,6 +71,7 @@ def proceed_with_insufficient_precursors(self, precursors_df, fragments_df): ) self._update_history(precursors_df, fragments_df) self._update_workflow() + self.workflow.reporter.log_string( f"Using current optimal value for {self.parameter_name}: {self.workflow.optimization_manager.__dict__[self.parameter_name]:.2f}.", verbosity="warning", @@ -93,6 +94,21 @@ def _update_workflow(): """ pass + @abstractmethod + def _update_history(): + """This method updates the history dataframe with relevant values. + + Parameters + ---------- + precursors_df: pd.DataFrame + The filtered precursor dataframe for the search. + + fragments_df: pd.DataFrame + The filtered fragment dataframe for the search. + + """ + pass + class AutomaticOptimizer(BaseOptimizer): def __init__( @@ -267,6 +283,7 @@ def _propose_new_parameter(self, df: pd.DataFrame): ).ci(df, self.update_percentile_range) def _update_history(self, precursors_df: pd.DataFrame, fragments_df: pd.DataFrame): + print("_update_history", precursors_df, fragments_df) """This method updates the history dataframe with relevant values. Parameters @@ -295,6 +312,7 @@ def _update_history(self, precursors_df: pd.DataFrame, fragments_df: pd.DataFram } ] ) + print(new_row) self.history_df = pd.concat([self.history_df, new_row], ignore_index=True) @property @@ -384,7 +402,7 @@ def _just_converged(self): return min_steps_reached and feature_not_substantially_increased - def _find_index_of_optimum(self): + def _find_index_of_optimum(self) -> int: """Finds the index of the row in the history dataframe with the optimal value of the feature used for optimization. if self._favour_narrower_parameter is False: The index at optimum is the index of the parameter value that maximizes the feature. @@ -394,26 +412,36 @@ def _find_index_of_optimum(self): Returns ------- int - The index of the row with the optimal value of the feature used for optimization. - + The index of the row in the history dataframe with the optimal value of the feature used for optimization. Notes ----- This method may be overwritten in child classes. """ + if len(self.history_df) == 0: + raise ValueError(f"Optimizer: {self.parameter_name} has no history.") + + if len(self.history_df) == 1: + # If there's only one row, return its index + return self.history_df.index[0] + if self._favour_narrower_optimum: # This setting can be useful for optimizing parameters for which many parameter values have similar feature values. 
maximum_feature_value = self.history_df[self.feature_name].max() - rows_within_thresh_of_max = self.history_df.loc[ - self.history_df[self.feature_name] - > ( - maximum_feature_value - - self._maximum_decrease_from_maximum - * np.abs(maximum_feature_value) - ) + threshold = ( + maximum_feature_value + - self._maximum_decrease_from_maximum * np.abs(maximum_feature_value) + ) + + rows_within_thresh_of_max = self.history_df[ + self.history_df[self.feature_name] > threshold ] - index_of_optimum = rows_within_thresh_of_max["parameter"].idxmin() - return index_of_optimum + + if rows_within_thresh_of_max.empty: + # If no rows meet the threshold, return the index of the max feature value + return self.history_df[self.feature_name].idxmax() + else: + return rows_within_thresh_of_max["parameter"].idxmin() else: return self.history_df[self.feature_name].idxmax() @@ -591,7 +619,10 @@ def plot(self): """See base class""" pass - def _update_workflow(self, new_parameter: float): + def _update_workflow(self): + pass + + def _update_history(self, precursors_df: pd.DataFrame, fragments_df: pd.DataFrame): pass diff --git a/alphadia/workflow/peptidecentric.py b/alphadia/workflow/peptidecentric.py index 86607d14..decd08f3 100644 --- a/alphadia/workflow/peptidecentric.py +++ b/alphadia/workflow/peptidecentric.py @@ -483,6 +483,7 @@ def search_parameter_optimization(self): ) if precursor_df_filtered.shape[0] >= 6: self.recalibration(precursor_df_filtered, fragments_df_filtered) + for optimizers in ordered_optimizers: for optimizer in optimizers: optimizer.proceed_with_insufficient_precursors( @@ -691,7 +692,7 @@ def recalibration(self, precursor_df_filtered, fragments_df_filtered): ) def fdr_correction(self, features_df, df_fragments, version=-1): - return self.fdr_manager.fit_predict( + features_df = self.fdr_manager.fit_predict( features_df, decoy_strategy="precursor_channel_wise" if self.config["fdr"]["channel_wise_fdr"] @@ -704,6 +705,8 @@ def fdr_correction(self, features_df, df_fragments, version=-1): version=version, # neptune_run=self.neptune ) + features_df["qval"] = 0.1 + return features_df def extract_batch( self, batch_precursor_df, batch_fragment_df=None, apply_cutoff=False From f61c57654973ea73b20c1e07158225779a019e54 Mon Sep 17 00:00:00 2001 From: GeorgWa Date: Mon, 16 Sep 2024 13:09:59 +0200 Subject: [PATCH 08/14] enabling empty output --- alphadia/outputtransform.py | 28 +++++++++++++++++++++++----- alphadia/workflow/optimization.py | 2 -- alphadia/workflow/peptidecentric.py | 5 +---- tests/unit_tests/test_workflow.py | 22 ++++++++++++++++++++++ 4 files changed, 46 insertions(+), 11 deletions(-) diff --git a/alphadia/outputtransform.py b/alphadia/outputtransform.py index b4184e60..ee422dfe 100644 --- a/alphadia/outputtransform.py +++ b/alphadia/outputtransform.py @@ -262,6 +262,10 @@ def lfq( _intensity_df.sort_values(by=group_column, inplace=True, ignore_index=True) + _intensity_df.to_csv( + "/Users/georgwallmann/Downloads/intensity_df.tsv", sep="\t", index=False + ) + lfq_df = lfqutils.index_and_log_transform_input_df(_intensity_df) lfq_df = lfqutils.remove_allnan_rows_input_df(lfq_df) @@ -813,6 +817,16 @@ def build_lfq_tables( group_column=group, ) + # remove all rows for testing + group_intensity_df = group_intensity_df.iloc[0:0] + + if len(group_intensity_df) == 0: + logger.warning( + f"No fragments found for {group_nice}, skipping label-free quantification" + ) + lfq_df = pd.DataFrame(columns=["pg", "intensity"]) + continue + lfq_df = qb.lfq( group_intensity_df, quality_df, @@ -834,11 
+848,11 @@ def build_lfq_tables( file_format=self.config["search_output"]["file_format"], ) - protein_df_melted = lfq_df.melt( - id_vars="pg", var_name="run", value_name="intensity" - ) - - psm_df = psm_df.merge(protein_df_melted, on=["pg", "run"], how="left") + if len(lfq_df) > 0 and len(lfq_df.columns) > 2: + protein_df_melted = lfq_df.melt( + id_vars="pg", var_name="run", value_name="intensity" + ) + psm_df = psm_df.merge(protein_df_melted, on=["pg", "run"], how="left") if save: logger.info("Writing psm output to disk") @@ -873,6 +887,10 @@ def build_library( psm_df = self.load_precursor_table() psm_df = psm_df[psm_df["decoy"] == 0] + if len(psm_df) == 0: + logger.warning("No precursors found, skipping library building") + return + libbuilder = libtransform.MbrLibraryBuilder( fdr=0.01, ) diff --git a/alphadia/workflow/optimization.py b/alphadia/workflow/optimization.py index d4e6d170..a446a87b 100644 --- a/alphadia/workflow/optimization.py +++ b/alphadia/workflow/optimization.py @@ -283,7 +283,6 @@ def _propose_new_parameter(self, df: pd.DataFrame): ).ci(df, self.update_percentile_range) def _update_history(self, precursors_df: pd.DataFrame, fragments_df: pd.DataFrame): - print("_update_history", precursors_df, fragments_df) """This method updates the history dataframe with relevant values. Parameters @@ -312,7 +311,6 @@ def _update_history(self, precursors_df: pd.DataFrame, fragments_df: pd.DataFram } ] ) - print(new_row) self.history_df = pd.concat([self.history_df, new_row], ignore_index=True) @property diff --git a/alphadia/workflow/peptidecentric.py b/alphadia/workflow/peptidecentric.py index decd08f3..7e6b8f79 100644 --- a/alphadia/workflow/peptidecentric.py +++ b/alphadia/workflow/peptidecentric.py @@ -692,7 +692,7 @@ def recalibration(self, precursor_df_filtered, fragments_df_filtered): ) def fdr_correction(self, features_df, df_fragments, version=-1): - features_df = self.fdr_manager.fit_predict( + return self.fdr_manager.fit_predict( features_df, decoy_strategy="precursor_channel_wise" if self.config["fdr"]["channel_wise_fdr"] @@ -705,8 +705,6 @@ def fdr_correction(self, features_df, df_fragments, version=-1): version=version, # neptune_run=self.neptune ) - features_df["qval"] = 0.1 - return features_df def extract_batch( self, batch_precursor_df, batch_fragment_df=None, apply_cutoff=False @@ -1065,7 +1063,6 @@ def requantify(self, psm_df): target_channels = [ int(c) for c in self.config["multiplexing"]["target_channels"].split(",") ] - print("target_channels", target_channels) reference_channel = self.config["multiplexing"]["reference_channel"] psm_df = self.fdr_manager.fit_predict( diff --git a/tests/unit_tests/test_workflow.py b/tests/unit_tests/test_workflow.py index cb7e4e64..161d0494 100644 --- a/tests/unit_tests/test_workflow.py +++ b/tests/unit_tests/test_workflow.py @@ -518,6 +518,28 @@ def test_automatic_ms2_optimizer(): assert workflow.optimization_manager.classifier_version == 2 +@pytest.mark.parametrize("favour_narrower_optimum", [True, False]) +def test_automatic_ms2_optimizer_no_convergence(favour_narrower_optimum): + workflow = create_workflow_instance() + + calibration_test_df1 = calibration_testdata() + calibration_test_df2 = calibration_testdata() + + workflow.calibration_manager.fit(calibration_test_df2, "fragment", plot=False) + + ms2_optimizer = optimization.AutomaticMS2Optimizer( + 100, + workflow, + ) + ms2_optimizer._favour_narrower_optimum = favour_narrower_optimum + ms2_optimizer.proceed_with_insufficient_precursors( + calibration_test_df1, 
calibration_test_df2 + ) + + assert ms2_optimizer.has_converged is False + assert len(ms2_optimizer.history_df) == 1 + + def test_automatic_rt_optimizer(): workflow = create_workflow_instance() From 52bb441b549afe76ec0ce2bdb2b8c307e3c69d18 Mon Sep 17 00:00:00 2001 From: GeorgWa Date: Mon, 16 Sep 2024 13:11:50 +0200 Subject: [PATCH 09/14] remove debugging statement --- alphadia/outputtransform.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/alphadia/outputtransform.py b/alphadia/outputtransform.py index ee422dfe..59dda775 100644 --- a/alphadia/outputtransform.py +++ b/alphadia/outputtransform.py @@ -262,10 +262,6 @@ def lfq( _intensity_df.sort_values(by=group_column, inplace=True, ignore_index=True) - _intensity_df.to_csv( - "/Users/georgwallmann/Downloads/intensity_df.tsv", sep="\t", index=False - ) - lfq_df = lfqutils.index_and_log_transform_input_df(_intensity_df) lfq_df = lfqutils.remove_allnan_rows_input_df(lfq_df) From 26b7d86b17985e45d297f9c41c5a34e8ce14f55b Mon Sep 17 00:00:00 2001 From: GeorgWa Date: Mon, 16 Sep 2024 14:59:44 +0200 Subject: [PATCH 10/14] implement changes --- alphadia/outputtransform.py | 51 ++++++++++++++++--------------- tests/unit_tests/test_workflow.py | 17 ++++++++--- 2 files changed, 39 insertions(+), 29 deletions(-) diff --git a/alphadia/outputtransform.py b/alphadia/outputtransform.py index 59dda775..504e14a4 100644 --- a/alphadia/outputtransform.py +++ b/alphadia/outputtransform.py @@ -764,16 +764,12 @@ def build_lfq_tables( Parameters ---------- - folder_list: List[str] List of folders containing the search outputs - psm_df: Union[pd.DataFrame, None] Combined precursor table. If None, the precursor table is loaded from disk. - save: bool Save the precursor table to disk - """ logger.progress("Performing label free quantification") @@ -785,22 +781,26 @@ def build_lfq_tables( intensity_df, quality_df = qb.accumulate_frag_df_from_folders(folder_list) - group_list = [] - group_nice_list = [] - - if self.config["search_output"]["peptide_level_lfq"]: - group_list.append("mod_seq_hash") - group_nice_list.append("peptide") + group_configs = [ + ( + "mod_seq_hash", + "peptide", + self.config["search_output"]["peptide_level_lfq"], + ), + ( + "mod_seq_charge_hash", + "precursor", + self.config["search_output"]["precursor_level_lfq"], + ), + ("pg", "pg", True), # Always process protein group level + ] - if self.config["search_output"]["precursor_level_lfq"]: - group_list.append("mod_seq_charge_hash") - group_nice_list.append("precursor") + lfq_results = {} - group_list.append("pg") - group_nice_list.append("pg") + for group, group_nice, should_process in group_configs: + if not should_process: + continue - # IMPORTANT: 'pg' has to be the last group in the list as this will be reused - for group, group_nice in zip(group_list, group_nice_list, strict=True): logger.progress( f"Performing label free quantification on the {group_nice} level" ) @@ -813,14 +813,11 @@ def build_lfq_tables( group_column=group, ) - # remove all rows for testing - group_intensity_df = group_intensity_df.iloc[0:0] - if len(group_intensity_df) == 0: logger.warning( f"No fragments found for {group_nice}, skipping label-free quantification" ) - lfq_df = pd.DataFrame(columns=["pg", "intensity"]) + lfq_results[group_nice] = pd.DataFrame() continue lfq_df = qb.lfq( @@ -835,17 +832,21 @@ def build_lfq_tables( group_column=group, ) + lfq_results[group_nice] = lfq_df + if save: logger.info(f"Writing {group_nice} output to disk") - write_df( lfq_df, os.path.join(self.output_folder, 
f"{group_nice}.matrix"), file_format=self.config["search_output"]["file_format"], ) - if len(lfq_df) > 0 and len(lfq_df.columns) > 2: - protein_df_melted = lfq_df.melt( + # Use protein group (pg) results for merging with psm_df + pg_lfq_df = lfq_results.get("pg", pd.DataFrame()) + + if len(pg_lfq_df) > 0: + protein_df_melted = pg_lfq_df.melt( id_vars="pg", var_name="run", value_name="intensity" ) psm_df = psm_df.merge(protein_df_melted, on=["pg", "run"], how="left") @@ -858,7 +859,7 @@ def build_lfq_tables( file_format=self.config["search_output"]["file_format"], ) - return lfq_df + return lfq_results def build_library( self, diff --git a/tests/unit_tests/test_workflow.py b/tests/unit_tests/test_workflow.py index 161d0494..2b3bb77e 100644 --- a/tests/unit_tests/test_workflow.py +++ b/tests/unit_tests/test_workflow.py @@ -886,7 +886,10 @@ def test_optlock_spot_on_target(): assert optlock.total_elution_groups == 1000 precursor_df = pd.DataFrame( - {"qval": np.concatenate([np.full(200, 0.005), np.full(800, 0.05)])} + { + "qval": np.concatenate([np.full(200, 0.005), np.full(800, 0.05)]), + "decoy": np.zeros(1000), + } ) optlock.update_with_fdr(precursor_df) optlock.update() @@ -917,7 +920,10 @@ def test_optlock(): assert optlock.total_elution_groups == 1000 precursor_df = pd.DataFrame( - {"qval": np.concatenate([np.full(100, 0.005), np.full(1000, 0.05)])} + { + "qval": np.concatenate([np.full(100, 0.005), np.full(1000, 0.05)]), + "decoy": np.zeros(1100), + } ) optlock.update_with_fdr(precursor_df) @@ -935,7 +941,10 @@ def test_optlock(): assert optlock.total_elution_groups == 2000 precursor_df = pd.DataFrame( - {"qval": np.concatenate([np.full(200, 0.005), np.full(1000, 0.05)])} + { + "qval": np.concatenate([np.full(200, 0.005), np.full(1000, 0.05)]), + "decoy": np.zeros(1200), + } ) optlock.update_with_fdr(precursor_df) @@ -964,7 +973,7 @@ def test_optlock_batch_idx(): optlock.update() assert optlock.start_idx == 2000 - precursor_df = pd.DataFrame({"qval": np.full(4500, 0.005)}) + precursor_df = pd.DataFrame({"qval": np.full(4500, 0.005), "decoy": np.zeros(4500)}) optlock.update_with_fdr(precursor_df) From 67501dafc3af9dbf160ef2de9b38b7a5747ec9f9 Mon Sep 17 00:00:00 2001 From: GeorgWa Date: Mon, 16 Sep 2024 16:16:55 +0200 Subject: [PATCH 11/14] fix seed, make test more predictable --- tests/unit_tests/test_workflow.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/unit_tests/test_workflow.py b/tests/unit_tests/test_workflow.py index 2b3bb77e..1e289b13 100644 --- a/tests/unit_tests/test_workflow.py +++ b/tests/unit_tests/test_workflow.py @@ -122,10 +122,11 @@ def test_calibration_manager_init(): def calibration_testdata(): + np.random.seed(42) # create some test data and make sure estimation works mz_library = np.linspace(100, 1000, 1000) mz_observed = ( - mz_library + np.random.normal(0, 0.001, 1000) + mz_library * 0.00001 + 0.005 + mz_library + np.random.normal(0, 0.0001, 1000) + mz_library * 0.00001 + 0.005 ) rt_library = np.linspace(0, 100, 1000) From baed879a9bbf1400727671ed2be86e9466ea8362 Mon Sep 17 00:00:00 2001 From: GeorgWa Date: Mon, 16 Sep 2024 16:44:17 +0200 Subject: [PATCH 12/14] =?UTF-8?q?Bump=20version:=201.8.0=20=E2=86=92=201.8?= =?UTF-8?q?.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- alphadia/__init__.py | 2 +- docs/index.md | 2 +- gui/package.json | 2 +- gui/src/main/modules/profile.js | 2 +- misc/.bumpversion.cfg | 3 +-- release/linux/build_installer_linux.sh | 2 +- release/linux/control | 2 +- 
release/macos/build_installer_macos.sh | 2 +- release/macos/build_package_macos.sh | 4 ++-- release/macos/distribution.xml | 2 +- release/macos/info.plist | 4 ++-- release/windows/alphadia_innoinstaller.iss | 4 ++-- release/windows/build_installer_windows.ps1 | 2 +- 13 files changed, 16 insertions(+), 17 deletions(-) diff --git a/alphadia/__init__.py b/alphadia/__init__.py index c2f02751..38b3eae3 100644 --- a/alphadia/__init__.py +++ b/alphadia/__init__.py @@ -1,3 +1,3 @@ #!python -__version__ = "1.8.0" +__version__ = "1.8.1" diff --git a/docs/index.md b/docs/index.md index 818e5ed6..89c09b0d 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,6 +1,6 @@ # AlphaDIA Documentation -**Version:** 1.8.0 | [Github](https://github.com/MannLabs/alphadia) +**Version:** 1.8.1 | [Github](https://github.com/MannLabs/alphadia) Open-source DIA search engine built with the alphaX ecosystem. Built with [alpharaw](https://github.com/MannLabs/alpharaw) and [alphatims](https://github.com/MannLabs/alphatims) for raw file access. Spectral libraries are predicted with [peptdeep](https://github.com/MannLabs/alphapeptdeep) and managed by [alphabase](https://github.com/MannLabs/alphabase). Quantification is powered by [directLFQ](https://github.com/MannLabs/directLFQ). diff --git a/gui/package.json b/gui/package.json index 4856a5c2..8275c4dd 100644 --- a/gui/package.json +++ b/gui/package.json @@ -1,7 +1,7 @@ { "name": "alphadia", "productName": "alphadia-gui", - "version": "1.8.0", + "version": "1.8.1", "description": "Graphical user interface for DIA data analysis", "main": "dist/electron.js", "homepage": "./", diff --git a/gui/src/main/modules/profile.js b/gui/src/main/modules/profile.js index 5b9c864a..33c849d6 100644 --- a/gui/src/main/modules/profile.js +++ b/gui/src/main/modules/profile.js @@ -3,7 +3,7 @@ const path = require("path") const { app, shell, BrowserWindow} = require("electron") const { dialog } = require('electron') -const VERSION = "1.8.0" +const VERSION = "1.8.1" const Profile = class { diff --git a/misc/.bumpversion.cfg b/misc/.bumpversion.cfg index f9a7dadd..3db3608b 100644 --- a/misc/.bumpversion.cfg +++ b/misc/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.8.0 +current_version = 1.8.1 commit = True tag = True parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\-(?P<release>[a-z]+)(?P<build>\d+))?
@@ -34,6 +34,5 @@ serialize = [bumpversion:file:../release/windows/alphadia_innoinstaller.iss] [bumpversion:file:../release/windows/build_installer_windows.ps1] - search = {current_version} replace = {new_version} diff --git a/release/linux/build_installer_linux.sh b/release/linux/build_installer_linux.sh index 80c88012..9d938ceb 100755 --- a/release/linux/build_installer_linux.sh +++ b/release/linux/build_installer_linux.sh @@ -10,7 +10,7 @@ rm -rf dist build *.egg-info rm -rf dist_pyinstaller build_pyinstaller python -m build -pip install "dist/alphadia-1.8.0-py3-none-any.whl[stable]" +pip install "dist/alphadia-1.8.1-py3-none-any.whl[stable]" if [ "${CPU_OR_GPU}" != "GPU" ]; then pip install torch -U --extra-index-url https://download.pytorch.org/whl/cpu diff --git a/release/linux/control b/release/linux/control index 50796ad7..472a536b 100644 --- a/release/linux/control +++ b/release/linux/control @@ -1,5 +1,5 @@ Package: alphadia -Version: 1.8.0 +Version: 1.8.1 Architecture: all Maintainer: Mann Labs Description: alphadia diff --git a/release/macos/build_installer_macos.sh b/release/macos/build_installer_macos.sh index f529f977..2bb739db 100755 --- a/release/macos/build_installer_macos.sh +++ b/release/macos/build_installer_macos.sh @@ -10,7 +10,7 @@ rm -rf dist_pyinstaller build_pyinstaller export EAGER_IMPORT=true # TODO check if this can be removed with newest peptdeep version w/out transformer dependency python -m build -pip install "dist/alphadia-1.8.0-py3-none-any.whl[stable]" +pip install "dist/alphadia-1.8.1-py3-none-any.whl[stable]" # Creating the stand-alone pyinstaller folder pyinstaller release/pyinstaller/alphadia.spec --distpath dist_pyinstaller --workpath build_pyinstaller -y diff --git a/release/macos/build_package_macos.sh b/release/macos/build_package_macos.sh index be8ce536..f683fbdf 100755 --- a/release/macos/build_package_macos.sh +++ b/release/macos/build_package_macos.sh @@ -7,10 +7,10 @@ set -e -u # Set up package name and version PACKAGE_NAME="alphadia" APP_NAME="alphadia" -PACKAGE_VERSION="1.8.0" +PACKAGE_VERSION="1.8.1" PKG_FOLDER="dist/$APP_NAME.app" -# BUILD_NAME is taken from environment variables, e.g. alphadia-1.8.0-macos-darwin-arm64 or alphadia-1.8.0-macos-darwin-x64 +# BUILD_NAME is taken from environment variables, e.g. alphadia-1.8.1-macos-darwin-arm64 or alphadia-1.8.1-macos-darwin-x64 rm -rf ${BUILD_NAME}.pkg # Cleanup the package folder diff --git a/release/macos/distribution.xml b/release/macos/distribution.xml index 12e28534..ffc2ff88 100644 --- a/release/macos/distribution.xml +++ b/release/macos/distribution.xml @@ -1,6 +1,6 @@ - <title>AlphaDIA 1.8.0</title> + <title>AlphaDIA 1.8.1</title> diff --git a/release/macos/info.plist b/release/macos/info.plist index 87ae0263..c7758b5f 100644 --- a/release/macos/info.plist +++ b/release/macos/info.plist @@ -9,9 +9,9 @@ <key>CFBundleIconFile</key> <string>alphadia.icns</string> <key>CFBundleIdentifier</key> - <string>alphadia.1.8.0</string> + <string>alphadia.1.8.1</string> <key>CFBundleShortVersionString</key> - <string>1.8.0</string> + <string>1.8.1</string> <key>CFBundleInfoDictionaryVersion</key> <string>6.0</string> <key>CFBundleName</key> diff --git a/release/windows/alphadia_innoinstaller.iss b/release/windows/alphadia_innoinstaller.iss index 8191350e..ec96f38b 100644 --- a/release/windows/alphadia_innoinstaller.iss +++ b/release/windows/alphadia_innoinstaller.iss @@ -5,7 +5,7 @@ ; so all paths are given relative to the location of this .iss file. 
#define MyAppName "AlphaDIA" -#define MyAppVersion "1.8.0" +#define MyAppVersion "1.8.1" #define MyAppPublisher "Max Planck Institute of Biochemistry, Mann Labs" #define MyAppURL "https://github.com/MannLabs/alphadia" #define MyAppExeName "alphadia-gui.exe" @@ -29,7 +29,7 @@ PrivilegesRequired=lowest PrivilegesRequiredOverridesAllowed=dialog ; release workflow expects artifact at root of repository OutputDir=..\..\ -; example for BUILD_NAME: alphadia-1.8.0-win-x64 +; example for BUILD_NAME: alphadia-1.8.1-win-x64 OutputBaseFilename={#GetEnv('BUILD_NAME')} SetupIconFile=..\logos\alphadia.ico Compression=lzma diff --git a/release/windows/build_installer_windows.ps1 b/release/windows/build_installer_windows.ps1 index e950bb12..62f671a5 100644 --- a/release/windows/build_installer_windows.ps1 +++ b/release/windows/build_installer_windows.ps1 @@ -5,7 +5,7 @@ Remove-Item -Recurse -Force -ErrorAction SilentlyContinue ./build Remove-Item -Recurse -Force -ErrorAction SilentlyContinue ./dist python -m build -pip install "dist/alphadia-1.8.0-py3-none-any.whl[stable]" +pip install "dist/alphadia-1.8.1-py3-none-any.whl[stable]" # Creating the stand-alone pyinstaller folder pip install tbb==2021.13.1 From 4157b65a561c13c908f0b24d5442c05fd5158ca7 Mon Sep 17 00:00:00 2001 From: GeorgWa Date: Mon, 16 Sep 2024 16:59:09 +0200 Subject: [PATCH 13/14] change default modlayout --- alphadia/constants/default.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/alphadia/constants/default.yaml b/alphadia/constants/default.yaml index 14917790..3a174b5a 100644 --- a/alphadia/constants/default.yaml +++ b/alphadia/constants/default.yaml @@ -23,7 +23,7 @@ library_prediction: predict: False enzyme: trypsin fixed_modifications: 'Carbamidomethyl@C' - variable_modifications: 'Oxidation@M;Acetyl@Protein N-term' + variable_modifications: 'Oxidation@M;Acetyl@Protein_N-term' max_var_mod_num: 2 missed_cleavages: 1 precursor_len: From 0b3c83ebbfa2c43bd8e9500a14853e3e8b326e4d Mon Sep 17 00:00:00 2001 From: GeorgWa Date: Mon, 16 Sep 2024 17:01:36 +0200 Subject: [PATCH 14/14] update in gui --- gui/workflows/PeptideCentric.v1.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gui/workflows/PeptideCentric.v1.json b/gui/workflows/PeptideCentric.v1.json index f9b459be..6a7a0d67 100644 --- a/gui/workflows/PeptideCentric.v1.json +++ b/gui/workflows/PeptideCentric.v1.json @@ -139,7 +139,7 @@ { "id": "variable_modifications", "name": "Variable modifications", - "value": "Oxidation@M;Acetyl@Protein N-term", + "value": "Oxidation@M;Acetyl@Protein_N-term", "description": "Variable modifications for in-silico digest. At the moment localisation is not supported. Semicolon separated list \n Example: Oxidation@M;Acetyl@ProteinN-term", "type": "string" },