
SEBlock wip
ChanLumerico committed Aug 30, 2024
1 parent 26e87d6 commit 17311eb
Showing 19 changed files with 128 additions and 87 deletions.
10 changes: 5 additions & 5 deletions luma/neural/block/incep_res_v1.py
@@ -11,7 +11,7 @@
 class _IncepRes_V1_Stem(Sequential):
     def __init__(
         self,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -78,7 +78,7 @@ def out_shape(self, in_shape: Tuple[int]) -> Tuple[int]:
 class _IncepRes_V1_TypeA(LayerGraph):
     def __init__(
         self,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -184,7 +184,7 @@ def out_shape(self, in_shape: Tuple[int]) -> Tuple[int]:
 class _IncepRes_V1_TypeB(LayerGraph):
     def __init__(
         self,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -278,7 +278,7 @@ def out_shape(self, in_shape: Tuple[int]) -> Tuple[int]:
 class _IncepRes_V1_TypeC(LayerGraph):
     def __init__(
         self,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -372,7 +372,7 @@ def out_shape(self, in_shape: Tuple[int]) -> Tuple[int]:
 class _IncepRes_V1_Redux(LayerGraph):
     def __init__(
         self,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
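The one-line change repeated throughout this commit swaps the removed Activation.FuncType alias (see luma/neural/layer/act.py further down) for a plain callable: each block receives the activation class itself and instantiates it internally. A minimal, self-contained sketch of that pattern, using a hypothetical ReLU stand-in rather than Luma's own classes:

class ReLU:
    """Stand-in activation layer (hypothetical; Luma's real layers live elsewhere)."""
    def forward(self, x):
        return [max(0.0, v) for v in x]

class MyBlock:
    # The activation parameter is the activation *class* (any callable that
    # builds a layer); the block instantiates it itself, mirroring
    # `activation: callable = Activation.ReLU` in these diffs.
    def __init__(self, activation: callable = ReLU) -> None:
        self.act = activation()

    def forward(self, x):
        return self.act.forward(x)

print(MyBlock().forward([-1.0, 2.0]))  # [0.0, 2.0]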
8 changes: 4 additions & 4 deletions luma/neural/block/incep_res_v2.py
@@ -11,7 +11,7 @@
 class _IncepRes_V2_TypeA(LayerGraph):
     def __init__(
         self,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -117,7 +117,7 @@ def out_shape(self, in_shape: Tuple[int]) -> Tuple[int]:
 class _IncepRes_V2_TypeB(LayerGraph):
     def __init__(
         self,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -211,7 +211,7 @@ def out_shape(self, in_shape: Tuple[int]) -> Tuple[int]:
 class _IncepRes_V2_TypeC(LayerGraph):
     def __init__(
         self,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -305,7 +305,7 @@ def out_shape(self, in_shape: Tuple[int]) -> Tuple[int]:
 class _IncepRes_V2_Redux(LayerGraph):
     def __init__(
         self,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
2 changes: 1 addition & 1 deletion luma/neural/block/incep_v1.py
@@ -18,7 +18,7 @@ def __init__(
         red_5x5: int,
         out_5x5: int,
         out_pool: int,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
8 changes: 4 additions & 4 deletions luma/neural/block/incep_v2.py
@@ -18,7 +18,7 @@ def __init__(
         red_3x3_db: int,
         out_3x3_db: Tuple[int, int],
         out_pool: int,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -154,7 +154,7 @@ def __init__(
         red_7x7_db: int,
         out_7x7_db: Tuple[int, int],
         out_pool: int,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -302,7 +302,7 @@ def __init__(
         out_3x3: int,
         out_1x3_3x1_after: Tuple[int, int],
         out_pool: int,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -504,7 +504,7 @@ def __init__(
         out_3x3: int,
         red_3x3_db: int,
         out_3x3_db: Tuple[int, int],
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
12 changes: 6 additions & 6 deletions luma/neural/block/incep_v4.py
@@ -11,7 +11,7 @@
 class _Incep_V4_Stem(LayerGraph):
     def __init__(
         self,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -138,7 +138,7 @@ def out_shape(self, in_shape: Tuple[int]) -> Tuple[int]:
 class _Incep_V4_TypeA(LayerGraph):
     def __init__(
         self,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -241,7 +241,7 @@ def out_shape(self, in_shape: Tuple[int]) -> Tuple[int]:
 class _Incep_V4_TypeB(LayerGraph):
     def __init__(
         self,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -353,7 +353,7 @@ def out_shape(self, in_shape: Tuple[int]) -> Tuple[int]:
 class _Incep_V4_TypeC(LayerGraph):
     def __init__(
         self,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -493,7 +493,7 @@ def __init__(
         self,
         in_channels: int,
         out_channels_arr: tuple[int, int, int, int],
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -581,7 +581,7 @@ def out_shape(self, in_shape: Tuple[int]) -> Tuple[int]:
 class _Incep_V4_ReduxB(LayerGraph):
     def __init__(
         self,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
2 changes: 1 addition & 1 deletion luma/neural/block/mobile.py
@@ -15,7 +15,7 @@ def __init__(
         out_channels: int,
         stride: int = 1,
         expand: int = 1,
-        activation: Activation.FuncType = Activation.ReLU6,
+        activation: callable = Activation.ReLU6,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
6 changes: 3 additions & 3 deletions luma/neural/block/resnet.py
@@ -15,7 +15,7 @@ def __init__(
         out_channels: int,
         stride: int = 1,
         downsampling: LayerLike | None = None,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -113,7 +113,7 @@ def __init__(
         out_channels: int,
         stride: int = 1,
         downsampling: LayerLike | None = None,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -214,7 +214,7 @@ def __init__(
         out_channels: int,
         stride: int = 1,
         downsampling: LayerLike | None = None,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
49 changes: 45 additions & 4 deletions luma/neural/block/standard.py
@@ -14,7 +14,7 @@ def __init__(
         in_channels: int,
         out_channels: int,
         filter_size: Tuple[int] | int,
-        activation: Activation.FuncType,
+        activation: callable,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         padding: Tuple[int] | int | Literal["same", "valid"] = "same",
@@ -78,7 +78,7 @@ def __init__(
         in_channels: int,
         out_channels: int,
         filter_size: Tuple[int, int] | int,
-        activation: Activation.FuncType,
+        activation: callable,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         padding: Tuple[int, int] | int | Literal["same", "valid"] = "same",
@@ -142,7 +142,7 @@ def __init__(
         in_channels: int,
         out_channels: int,
         filter_size: Tuple[int, int, int] | int,
-        activation: Activation.FuncType,
+        activation: callable,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         padding: Tuple[int, int, int] | int | Literal["same", "valid"] = "same",
@@ -343,7 +343,7 @@ def __init__(
         self,
         in_features: int,
         out_features: int,
-        activation: Activation.FuncType,
+        activation: callable,
         optimizer: Optimizer = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -418,3 +418,44 @@ def backward(self, d_out: TensorLike) -> TensorLike:
                 continue
             d_out = layer.backward(d_out)
         return d_out
+
+
+class _SEBlock(Sequential):
+    def __init__(
+        self,
+        in_channels: int,
+        reduction: int = 4,
+        activation: callable = Activation.ReLU,
+        optimizer: Optimizer = None,
+        initializer: InitUtil.InitStr = None,
+        lambda_: float = 0.0,
+        random_state: int | None = None,
+    ) -> None:
+        basic_args = {
+            "initializer": initializer,
+            "lambda_": lambda_,
+            "random_state": random_state,
+        }
+
+        self.set_param_ranges(
+            {
"in_features": ("0<,+inf", int),
"reduction": (f"0<,{in_channels}", int),
"lambda_": ("0,+inf", None),
}
)
self.check_param_ranges()

super().__init__(
GlobalAvgPool2D(),
Flatten(),
Dense(in_channels, in_channels // reduction, **basic_args),
activation(),
Dense(in_channels // reduction, in_channels, **basic_args),
Activation.Sigmoid(),
)

if optimizer is not None:
self.set_optimizer(optimizer)

# TODO: forward, backward, and out_shape implementations
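The new _SEBlock wires up the squeeze path (global average pool, bottleneck Dense, activation, expansion Dense, sigmoid); the channel re-weighting itself is left to the forward/backward TODO. For reference, a minimal NumPy sketch of the standard squeeze-and-excitation forward pass (an illustration of the technique, not Luma's pending implementation; biases are omitted for brevity):

import numpy as np

def se_forward(x: np.ndarray, w1: np.ndarray, w2: np.ndarray) -> np.ndarray:
    """Squeeze-and-excitation on an NCHW tensor (illustrative sketch).

    w1: (C, C // reduction) weights of the squeeze Dense layer
    w2: (C // reduction, C) weights of the excitation Dense layer
    """
    squeezed = x.mean(axis=(2, 3))                # GlobalAvgPool2D -> (N, C)
    hidden = np.maximum(squeezed @ w1, 0.0)       # Dense + ReLU bottleneck
    scale = 1.0 / (1.0 + np.exp(-(hidden @ w2)))  # Dense + Sigmoid -> (N, C)
    return x * scale[:, :, None, None]            # re-weight each channel

# Example: reduction of 4 on an 8-channel feature map
x = np.random.randn(2, 8, 16, 16)
w1 = np.random.randn(8, 2) * 0.1
w2 = np.random.randn(2, 8) * 0.1
print(se_forward(x, w1, w2).shape)  # (2, 8, 16, 16)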
6 changes: 3 additions & 3 deletions luma/neural/block/xception.py
@@ -11,7 +11,7 @@
 class _Entry(LayerGraph):
     def __init__(
         self,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -149,7 +149,7 @@ def out_shape(self, in_shape: Tuple[int]) -> Tuple[int]:
 class _Middle(LayerGraph):
     def __init__(
         self,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
@@ -220,7 +220,7 @@ def out_shape(self, in_shape: Tuple[int]) -> Tuple[int]:
 class _Exit(LayerGraph):
     def __init__(
         self,
-        activation: Activation.FuncType = Activation.ReLU,
+        activation: callable = Activation.ReLU,
         optimizer: Optimizer | None = None,
         initializer: InitUtil.InitStr = None,
         lambda_: float = 0.0,
1 change: 0 additions & 1 deletion luma/neural/layer/act.py
@@ -10,7 +10,6 @@
 
 @ClassType.non_instantiable()
 class _Activation:
-    type FuncType = Type
 
     @classmethod
     def _out_shape(cls, in_shape: Tuple[int]) -> Tuple[int]:
1 change: 1 addition & 0 deletions luma/neural/layer/linear.py
@@ -1,6 +1,7 @@
 from typing import Tuple
 import numpy as np
 
+from luma.interface.typing import TensorLike
 from luma.core.super import Optimizer
 from luma.interface.typing import Tensor, Matrix
 from luma.interface.util import InitUtil