diff --git a/luma/neural/_specials/incep_res_v2.py b/luma/neural/_specials/incep_res_v2.py
index c77ed6c..53d2022 100644
--- a/luma/neural/_specials/incep_res_v2.py
+++ b/luma/neural/_specials/incep_res_v2.py
@@ -208,7 +208,94 @@ def out_shape(self, in_shape: Tuple[int]) -> Tuple[int]:
         return batch_size, 1154, 17, 17
 
 
-class _IncepRes_V2_TypeC(LayerGraph): ...
+class _IncepRes_V2_TypeC(LayerGraph):
+    def __init__(
+        self,
+        activation: Activation.FuncType = Activation.ReLU,
+        optimizer: Optimizer | None = None,
+        initializer: InitUtil.InitStr = None,
+        lambda_: float = 0.0,
+        do_batch_norm: bool = True,
+        momentum: float = 0.9,
+        random_state: int | None = None,
+    ) -> None:
+        self.activation = activation
+        self.optimizer = optimizer
+        self.initializer = initializer
+        self.lambda_ = lambda_
+        self.do_batch_norm = do_batch_norm
+        self.momentum = momentum
+
+        self.basic_args = {
+            "initializer": initializer,
+            "lambda_": lambda_,
+            "random_state": random_state,
+        }
+
+        self.init_nodes()
+        super(_IncepRes_V2_TypeC, self).__init__(
+            graph={
+                self.rt_: [self.res_sum, self.br_a, self.br_b],
+                self.br_a: [self.br_cat],
+                self.br_b: [self.br_cat],
+                self.br_cat: [self.res_sum],
+            },
+            root=self.rt_,
+            term=self.res_sum,
+        )
+
+    def init_nodes(self) -> None:
+        self.rt_ = LayerNode(Identity(), name="rt_")
+        self.res_sum = LayerNode(
+            Sequential(Identity(), self.activation()),
+            merge_mode="sum",
+            name="res_sum",
+        )
+
+        self.br_a = LayerNode(
+            Sequential(
+                Convolution2D(2048, 192, 1, 1, "same", **self.basic_args),
+                BatchNorm2D(192, self.momentum),
+                self.activation(),
+            ),
+            name="br_a",
+        )
+        self.br_b = LayerNode(
+            Sequential(
+                Convolution2D(2048, 192, 1, 1, "same", **self.basic_args),
+                BatchNorm2D(192, self.momentum),
+                self.activation(),
+                Convolution2D(192, 224, (1, 3), 1, "same", **self.basic_args),
+                BatchNorm2D(224, self.momentum),
+                self.activation(),
+                Convolution2D(224, 256, (3, 1), 1, "same", **self.basic_args),
+                BatchNorm2D(256, self.momentum),
+                self.activation(),
+            ),
+            name="br_b",
+        )
+
+        self.br_cat = LayerNode(
+            Sequential(
+                Convolution2D(448, 2048, 1, 1, "same", **self.basic_args),
+                BatchNorm2D(2048, self.momentum),
+            ),
+            merge_mode="chcat",
+            name="br_cat",
+        )
+
+    @Tensor.force_dim(4)
+    def forward(self, X: TensorLike, is_train: bool = False) -> TensorLike:
+        return super().forward(X, is_train)
+
+    @Tensor.force_dim(4)
+    def backward(self, d_out: TensorLike) -> TensorLike:
+        return super().backward(d_out)
+
+    @override
+    def out_shape(self, in_shape: Tuple[int]) -> Tuple[int]:
+        batch_size, _, _, _ = in_shape
+        return batch_size, 2048, 8, 8
 
 
 class _IncepRes_V2_Redux(LayerGraph): ...
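
For reference, a minimal usage sketch of the new Type-C block under the channel arithmetic encoded in the patch: branch a (192) and branch b (256) concatenate to 448 channels, the linear 1x1 projection in br_cat maps them back to 2048, and res_sum adds the projection to the 2048-channel input, so out_shape stays (N, 2048, 8, 8). The import path is inferred from the file location in this diff and the NumPy-backed (N, C, H, W) tensor layout is an assumption, not something this patch confirms.

import numpy as np

# Hypothetical import path, inferred from luma/neural/_specials/incep_res_v2.py (private module).
from luma.neural._specials.incep_res_v2 import _IncepRes_V2_TypeC

block = _IncepRes_V2_TypeC()            # defaults: ReLU activation, momentum=0.9, no optimizer
X = np.random.randn(4, 2048, 8, 8)      # 4D input required by @Tensor.force_dim(4)

out = block.forward(X, is_train=False)  # residual sum of identity path and projected branches
print(block.out_shape(X.shape))         # expected: (4, 2048, 8, 8)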