From 432de3211977508506b6d6defd89468a436cd17c Mon Sep 17 00:00:00 2001
From: Lorenzo Mammana
Date: Mon, 18 Sep 2023 16:30:13 +0200
Subject: [PATCH] feat: Upgrade anomalib, improve efficient ad configuration

* build: Bump version 1.2.1 -> 1.2.2
* build: Upgrade quadra requirement
* refactor: Change efficient ad configuration after anomalib upgrade
* refactor: Update imagenette dir
* test: Add tests for efficient_ad model export
* docs: Update changelog
* test: Fix wrong parameter for config

Approved By: @rcmalli @AlessandroPolidori
---
 CHANGELOG.md                                  | 10 ++++++++++
 pyproject.toml                                | 14 ++++++-------
 quadra/__init__.py                            |  2 +-
 .../configs/model/anomalib/efficient_ad.yaml  |  4 ++--
 quadra/utils/tests/fixtures/models/anomaly.py | 20 +++++++++++++++++++
 tests/models/test_export.py                   |  8 +++++++-
 tests/tasks/test_anomaly.py                   |  2 +-
 7 files changed, 48 insertions(+), 12 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index c6edabc9..1db7aa6e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,16 @@
 # Changelog
 All notable changes to this project will be documented in this file.
 
+### [1.2.2]
+
+#### Added
+
+- Add tests for efficient ad model export.
+#### Updated
+
+- Update `anomalib` library from version 0.7.0+obx.1.2.0 to 0.7.0+obx.1.2.1
+- Update default imagenette dir for efficient ad
+
 ### [1.2.1]
 
 #### Added
diff --git a/pyproject.toml b/pyproject.toml
index b6c4f1b5..b499d894 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "quadra"
-version = "1.2.1"
+version = "1.2.2"
 description = "Deep Learning experiment orchestration library"
 authors = [
     { name = "Alessandro Polidori", email = "alessandro.polidori@orobix.com" },
@@ -33,7 +33,7 @@ dependencies = [
     # --------- pytorch --------- #
     "torch==1.13.1",
     "torchvision==0.14.1",
-    "torchmetrics==0.10.*", # constrained by anomalib
+    "torchmetrics==0.10.*",  # constrained by anomalib
     "torchsummary==1.5.*",
     "pytorch-lightning>=1.9.1,<1.10",
     # --------- hydra --------- #
@@ -42,8 +42,8 @@ dependencies = [
     "hydra-optuna-sweeper==1.2.*",
     # --------- loggers --------- #
     "mlflow==2.3.1",
-    "boto3==1.26.*", # needed for artifact storage
-    "minio==7.1.*", # needed for artifact storage
+    "boto3==1.26.*",  # needed for artifact storage
+    "minio==7.1.*",  # needed for artifact storage
     "tensorboard==2.11.*",
     # --------- others --------- #
     "Pillow==9.3.0", # required by label-studio-converter
@@ -61,9 +61,9 @@ dependencies = [
     "scikit-multilearn==0.2.*",
     "tripy==1.0.*",
     "h5py==3.8.*",
-    "timm==0.6.12", # required by smp
+    "timm==0.6.12",  # required by smp
     "segmentation-models-pytorch==0.3.*",
-    "anomalib@git+https://github.com/orobix/anomalib.git@v0.7.0+obx.1.2.0",
+    "anomalib@git+https://github.com/orobix/anomalib.git@v0.7.0+obx.1.2.1",
     "xxhash==3.2.*",
 ]
 
@@ -118,7 +118,7 @@ repository = "https://github.com/orobix/quadra"
 
 # Adapted from https://realpython.com/pypi-publish-python-package/#version-your-package
 [tool.bumpver]
-current_version = "1.2.1"
+current_version = "1.2.2"
 version_pattern = "MAJOR.MINOR.PATCH"
 commit_message = "build: Bump version {old_version} -> {new_version}"
 commit = true
diff --git a/quadra/__init__.py b/quadra/__init__.py
index 3be8b6dc..3e678741 100644
--- a/quadra/__init__.py
+++ b/quadra/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "1.2.1"
+__version__ = "1.2.2"
 
 
 def get_version():
diff --git a/quadra/configs/model/anomalib/efficient_ad.yaml b/quadra/configs/model/anomalib/efficient_ad.yaml
index 1f270b7d..1c3d0b06 100644
--- a/quadra/configs/model/anomalib/efficient_ad.yaml
+++ b/quadra/configs/model/anomalib/efficient_ad.yaml
@@ -6,7 +6,7 @@ model:
   teacher_out_channels: 384
   model_size: small # options: [small, medium]
   lr: 0.0001
-  image_size: [256, 256]
+  input_size: [256, 256]
   weight_decay: 0.00001
   padding: false
   pad_maps: true # relevant for "padding: false", see EfficientAd in lightning_model.py
@@ -14,7 +14,7 @@ model:
   normalization_method: min_max # options: [null, min_max, cdf]
   train_batch_size: 1 # ${datamodule.train_batch_size}
   pretrained_models_dir: ${oc.env:HOME}/.quadra/models/efficient_ad
-  imagenette_dir: ${oc.env:HOME}/.quadra/datasets/
+  imagenette_dir: ${oc.env:HOME}/.quadra/datasets/imagenette_efficientad
   pretrained_teacher_type: nelson
 
 metrics:
diff --git a/quadra/utils/tests/fixtures/models/anomaly.py b/quadra/utils/tests/fixtures/models/anomaly.py
index 8281ea3a..311cc28b 100644
--- a/quadra/utils/tests/fixtures/models/anomaly.py
+++ b/quadra/utils/tests/fixtures/models/anomaly.py
@@ -1,6 +1,7 @@
 import pytest
 import torch
 from anomalib.models.draem.torch_model import DraemModel
+from anomalib.models.efficient_ad.torch_model import EfficientAdModel
 from anomalib.models.padim.torch_model import PadimModel
 from anomalib.models.patchcore.torch_model import PatchcoreModel
 
@@ -67,3 +68,22 @@ def patchcore_resnet18():
 def draem():
     """Yield a draem model."""
     yield DraemModel()
+
+
+@pytest.fixture
+def efficient_ad_small():
+    """Yield an EfficientAd model."""
+
+    class EfficientAdForwardWrapper(EfficientAdModel):
+        """Wrap the forward method to avoid passing optional parameters."""
+
+        def forward(self, x):
+            return super().forward(x, None)
+
+    model = EfficientAdForwardWrapper(
+        teacher_out_channels=384,
+        input_size=[256, 256],  # TODO: This is hardcoded, which may not be a good idea
+        pretrained_teacher_type="nelson",
+    )
+
+    yield model
diff --git a/tests/models/test_export.py b/tests/models/test_export.py
index 8cfb6e4e..0291ec9d 100644
--- a/tests/models/test_export.py
+++ b/tests/models/test_export.py
@@ -5,6 +5,7 @@
 
 import pytest
 import torch
+from anomalib.models.efficient_ad.torch_model import EfficientAdModel
 from omegaconf import DictConfig
 from torch import nn
 
@@ -13,6 +14,7 @@
     dino_vitb8,
     dino_vits8,
     draem,
+    efficient_ad_small,
     padim_resnet18,
     patchcore_resnet18,
     resnet18,
@@ -132,11 +134,15 @@ def test_segmentation_models_export(tmp_path: Path, model: nn.Module):
         pytest.lazy_fixture("padim_resnet18"),
         pytest.lazy_fixture("patchcore_resnet18"),
         pytest.lazy_fixture("draem"),
+        pytest.lazy_fixture("efficient_ad_small"),
     ],
 )
 def test_anomaly_detection_models_export(tmp_path: Path, model: nn.Module):
     export_types = ["onnx", "torchscript"]
-    input_shapes = [(3, 224, 224)]
+    if isinstance(model, EfficientAdModel):
+        input_shapes = [(3, 256, 256)]
+    else:
+        input_shapes = [(3, 224, 224)]
 
     check_export_model_outputs(tmp_path=tmp_path, model=model, export_types=export_types, input_shapes=input_shapes)
 
diff --git a/tests/tasks/test_anomaly.py b/tests/tasks/test_anomaly.py
index dec4ccb9..d8fde0a6 100644
--- a/tests/tasks/test_anomaly.py
+++ b/tests/tasks/test_anomaly.py
@@ -162,7 +162,7 @@ def test_efficientad(
         "transforms.input_width=256",
         "model.model.train_batch_size=1",
         "datamodule.test_batch_size=1",
-        "model.model.image_size=[256, 256]",
+        "model.model.input_size=[256, 256]",
         "trainer.check_val_every_n_epoch= ${trainer.max_epochs}",
         f"model.model.imagenette_dir= {imagenette_path}",
         f"model.dataset.task={task}",
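
Note on the new export coverage: the efficient_ad_small fixture wraps EfficientAdModel so that tracing only needs the image tensor, and the export test then feeds a (3, 256, 256) input. A minimal standalone sketch of that flow follows, assuming the orobix anomalib fork referenced above (v0.7.0+obx.1.2.1, which adds the pretrained_teacher_type argument) and plain torch.jit.trace / torch.onnx.export in place of quadra's check_export_model_outputs helper:

# Hedged sketch, not part of the patch: mirrors what the efficient_ad_small
# export test exercises, using plain PyTorch export calls instead of
# quadra's check_export_model_outputs helper.
import torch
from anomalib.models.efficient_ad.torch_model import EfficientAdModel


class EfficientAdForwardWrapper(EfficientAdModel):
    """Wrap forward so export only requires the input tensor."""

    def forward(self, x):
        return super().forward(x, None)


model = EfficientAdForwardWrapper(
    teacher_out_channels=384,
    input_size=[256, 256],
    pretrained_teacher_type="nelson",  # assumes the orobix fork's extra argument
).eval()

dummy = torch.randn(1, 3, 256, 256)  # matches the (3, 256, 256) shape used in the test

# TorchScript export via tracing (strict=False in case the model returns a dict)
torch.jit.trace(model, dummy, strict=False).save("efficient_ad.pt")

# ONNX export with the same dummy input
torch.onnx.export(model, dummy, "efficient_ad.onnx")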