From a0078dd9f74f607926fced2012131b31ad07da02 Mon Sep 17 00:00:00 2001
From: Gabriel Gutierrez
Date: Tue, 12 Nov 2024 20:48:21 -0300
Subject: [PATCH] Remove debug print statement and ensure tensor type
 conversion in PatchInferencerEngine and ResizedMetric

---
 minerva/analysis/metrics/transformed_metrics.py | 2 +-
 minerva/engines/patch_inferencer_engine.py      | 1 -
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/minerva/analysis/metrics/transformed_metrics.py b/minerva/analysis/metrics/transformed_metrics.py
index e527ce5..b394b1f 100644
--- a/minerva/analysis/metrics/transformed_metrics.py
+++ b/minerva/analysis/metrics/transformed_metrics.py
@@ -177,5 +177,5 @@ def resize(self, x: torch.Tensor) -> torch.Tensor:
         elif self.target_w_size is None:
             scale = target_h_size / h
             target_w_size = int(w * scale)
-
+        x = x.to(torch.uint8) if x.type() == "torch.LongTensor" else x
         return torch.nn.functional.interpolate(x, size=(target_h_size, target_w_size))
diff --git a/minerva/engines/patch_inferencer_engine.py b/minerva/engines/patch_inferencer_engine.py
index f9c050e..74185b5 100644
--- a/minerva/engines/patch_inferencer_engine.py
+++ b/minerva/engines/patch_inferencer_engine.py
@@ -251,7 +251,6 @@ def _combine_patches(
             )
             reconstructed.append(reconstruct)
             weights.append(weight)
-            print(reconstruct.shape)
         reconstructed = torch.stack(reconstructed, dim=0)
         weights = torch.stack(weights, dim=0)
         return torch.sum(reconstructed * weights, dim=0) / torch.sum(weights, dim=0)
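
Illustration (not part of the patch): the new line in ResizedMetric.resize casts integer
label tensors to uint8 because torch.nn.functional.interpolate has no kernels for Long
(int64) inputs. A minimal standalone sketch of the same pattern, with made-up shapes and
class counts chosen only for the example:

    import torch
    import torch.nn.functional as F

    # Fake integer label mask (N, C, H, W); torch.randint yields int64 by default,
    # which x.type() reports as "torch.LongTensor" on CPU.
    mask = torch.randint(0, 5, (1, 1, 64, 64))

    # interpolate raises for Long tensors, so cast to a supported dtype first,
    # mirroring the conversion added by this patch.
    if mask.type() == "torch.LongTensor":
        mask = mask.to(torch.uint8)

    # Default mode is nearest-neighbor, which keeps label values discrete.
    resized = F.interpolate(mask, size=(128, 128))
    print(resized.shape, resized.dtype)  # torch.Size([1, 1, 128, 128]) torch.uint8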