diff --git a/luma/__import__.py b/luma/__import__.py
index 52207cf..dbc41ea 100644
--- a/luma/__import__.py
+++ b/luma/__import__.py
@@ -141,6 +141,7 @@
     IncepResBlock,
     ResNetBlock,
     XceptionBlock,
+    MobileNetBlock,
 )
 from luma.neural.model import (
     SimpleMLP,
@@ -349,7 +350,7 @@
     ConvBlock1D, ConvBlock2D, ConvBlock3D,
     SeparableConv1D, SeparableConv2D, SeparableConv3D,
     DenseBlock, IncepBlock, IncepResBlock, ResNetBlock,
-    XceptionBlock
+    XceptionBlock, MobileNetBlock
 
     LayerNode, LayerGraph
 
diff --git a/luma/neural/block/__init__.py b/luma/neural/block/__init__.py
index 1818896..f5ce3e8 100644
--- a/luma/neural/block/__init__.py
+++ b/luma/neural/block/__init__.py
@@ -16,7 +16,6 @@
 from luma.interface.typing import ClassType
 from luma.interface.util import InitUtil
 
-from luma.neural.layer import *
 from luma.neural.block import (
     incep_v1,
     incep_v2,
@@ -42,13 +41,14 @@
     "IncepResBlock",
     "ResNetBlock",
     "XceptionBlock",
+    "MobileNetBlock",
 )
 
 
 @dataclass
 class ConvBlockArgs:
     filter_size: Tuple[int, ...] | int
-    activation: Activation.FuncType
+    activation: callable
     optimizer: Optimizer | None = None
     initializer: InitUtil.InitStr = None
     padding: Tuple[int, ...] | int | Literal["same", "valid"] = "same"
@@ -359,7 +359,7 @@ class SeparableConv3D(standard._SeparableConv3D):
 
 @dataclass
 class DenseBlockArgs:
-    activation: Activation.FuncType
+    activation: callable
     optimizer: Optimizer | None = None
     initializer: InitUtil.InitStr = None
     lambda_: float = 0.0
@@ -410,7 +410,7 @@ class DenseBlock(standard._DenseBlock):
 
 @dataclass
 class BaseBlockArgs:
-    activation: Activation.FuncType
+    activation: callable
     optimizer: Optimizer | None = None
     initializer: InitUtil.InitStr = None
     lambda_: float = 0.0
@@ -868,4 +868,7 @@ class Exit(xception._Exit):
     """
 
 
-class InvertedResBlock(mobile._InvertedRes): ...
+@ClassType.non_instantiable()
+class MobileNetBlock:
+
+    class InvertedRes(mobile._InvertedRes): ...
diff --git a/luma/neural/block/mobile.py b/luma/neural/block/mobile.py
index 6213f0e..78f1b1f 100644
--- a/luma/neural/block/mobile.py
+++ b/luma/neural/block/mobile.py
@@ -57,7 +57,7 @@ def __init__(
         if self.expand != 1:
            self[self.rt_].clear()
            self[self.rt_].append(self.pw_)
-           self.graph[self.pw] = [self.dw_pw_lin]
+           self.graph[self.pw_] = [self.dw_pw_lin]
 
         if self.do_shortcut:
             self[self.rt_].append(self.tm_)
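
Usage note (not part of the patch): a minimal sketch of how the relocated inverted-residual block would presumably be referenced after this change, assuming the nested-namespace pattern already used by IncepBlock, ResNetBlock, and XceptionBlock; constructor arguments are omitted since they live in mobile._InvertedRes and are not shown in this diff.

from luma.neural.block import MobileNetBlock

# The former top-level `InvertedResBlock` is now reached as the nested class
# `MobileNetBlock.InvertedRes`; it still derives from `mobile._InvertedRes`,
# so any existing constructor arguments are assumed to apply unchanged.
block_cls = MobileNetBlock.InvertedRes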