
Commit

fix formatting issues
hvgazula committed Mar 19, 2024
1 parent 87278e2 commit 29db911
Showing 3 changed files with 13 additions and 9 deletions.
1 change: 0 additions & 1 deletion nobrainer/models/attention_unet.py
@@ -2,7 +2,6 @@
Adapted from https://github.com/nikhilroxtomar/Semantic-Segmentation-Architecture/blob/main/TensorFlow/attention-unet.py
"""

-import tensorflow as tf
from tensorflow.keras import layers
import tensorflow.keras.layers as L
from tensorflow.keras.models import Model
15 changes: 10 additions & 5 deletions nobrainer/models/attention_unet_with_inception.py
@@ -11,7 +11,8 @@

def expend_as(tensor, rep):
# Anonymous lambda function to expand the specified axis by a factor of argument, rep.
-# If tensor has shape (512,512,N), lambda will return a tensor of shape (512,512,N*rep), if specified axis=2
+# If tensor has shape (512,512,N), lambda will return a tensor of shape
+# (512,512,N*rep), if specified axis=2

my_repeat = layers.Lambda(
lambda x, repnum: K.repeat_elements(x, repnum, axis=4),
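The wrapped comment above describes the repeat behaviour of expend_as. As a minimal sketch of what K.repeat_elements does along a channel axis (the dummy shape below is an illustrative assumption, not taken from the model code):

import tensorflow as tf
from tensorflow.keras import backend as K

# Illustrative only: repeat the last (channel) axis of a dummy 5-D tensor.
x = tf.zeros((1, 8, 8, 8, 4))            # (batch, depth, height, width, channels)
y = K.repeat_elements(x, rep=3, axis=4)  # channel axis repeated 3 times
print(y.shape)                           # (1, 8, 8, 8, 12)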
@@ -29,7 +30,8 @@ def conv3d_block(
dilation_rate=1,
recurrent=1,
):
-# A wrapper of the Keras Conv3D block to serve as a building block for downsampling layers
+# A wrapper of the Keras Conv3D block to serve as a building block for
+# downsampling layers
# Includes options to use batch normalization, dilation and recurrence

conv = layers.Conv3D(
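For orientation, a Conv3D wrapper of the kind described in the comment above might look roughly like the sketch below. Only dilation_rate, batchnorm, and recurrent appear in the diff; the other argument names, the function name, and the body (with the recurrence option omitted) are assumptions, not the repository's implementation.

from tensorflow.keras import layers

# Illustrative sketch only; not the repository's implementation.
def conv3d_block_sketch(input_tensor, n_filters, kernel_size=3,
                        dilation_rate=1, batchnorm=True):
    x = layers.Conv3D(n_filters, kernel_size, dilation_rate=dilation_rate,
                      padding="same")(input_tensor)
    if batchnorm:
        x = layers.BatchNormalization()(x)
    return layers.Activation("relu")(x)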
@@ -123,7 +125,8 @@ def transpose_block(
batchnorm=True,
recurrent=1,
):
-# A wrapper of the Keras Conv3DTranspose block to serve as a building block for upsampling layers
+# A wrapper of the Keras Conv3DTranspose block to serve as a building block
+# for upsampling layers

shape_x = K.int_shape(input_tensor)
shape_xskip = K.int_shape(skip_tensor)
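Likewise, an upsampling block of this kind typically applies a Conv3DTranspose and concatenates the result with the skip tensor whose shape is inspected above. The following is a rough sketch under that assumption; the names and body are illustrative, not the code in the repository.

from tensorflow.keras import layers

# Illustrative sketch only; not the repository's implementation.
def transpose_block_sketch(input_tensor, skip_tensor, n_filters, kernel_size=3):
    x = layers.Conv3DTranspose(n_filters, kernel_size, strides=2,
                               padding="same")(input_tensor)
    x = layers.concatenate([x, skip_tensor])
    return layers.Conv3D(n_filters, kernel_size, padding="same",
                         activation="relu")(x)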
@@ -165,10 +168,12 @@ def inception_block(
layers_list=[],
):
# Inception-style convolutional block similar to InceptionNet
-# The first convolution follows the function arguments, while subsequent inception convolutions follow the parameters in
+# The first convolution follows the function arguments, while subsequent
+# inception convolutions follow the parameters in
# argument, layers

-# layers is a nested list containing the different secondary inceptions in the format of (kernel_size, dil_rate)
+# layers is a nested list containing the different secondary inceptions in
+# the format of (kernel_size, dil_rate)

# E.g => layers=[ [(3,1),(3,1)], [(5,1)], [(3,1),(3,2)] ]
# This will implement 3 sets of secondary convolutions
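The nested-list format described in these comments maps naturally onto parallel convolution branches. A hedged sketch of how such a block could be assembled follows; only the layers format comes from the comments, the function name and body are illustrative.

from tensorflow.keras import layers

# Illustrative sketch only; each inner list is one secondary branch, each
# (kernel_size, dil_rate) tuple one convolution in that branch.
def inception_block_sketch(input_tensor, n_filters, layers_list):
    branches = []
    for branch_spec in layers_list:
        x = input_tensor
        for kernel_size, dil_rate in branch_spec:
            x = layers.Conv3D(n_filters, kernel_size, dilation_rate=dil_rate,
                              padding="same", activation="relu")(x)
        branches.append(x)
    return layers.concatenate(branches)

# e.g. layers_list=[[(3, 1), (3, 1)], [(5, 1)], [(3, 1), (3, 2)]] builds the
# three secondary branches mentioned in the example above.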
6 changes: 3 additions & 3 deletions nobrainer/models/unetr.py
@@ -1,6 +1,6 @@
"""UNETR implementation in Tensorflow 2.0.
-Adapted from https://github.com/nikhilroxtomar/Semantic-Segmentation-Architecture/blob/main/TensorFlow/unetr.py
+Adapted from https://www.kaggle.com/code/usharengaraju/tensorflow-unetr-w-b
"""
import math

@@ -140,7 +140,7 @@ def call(self, inputs):
return self.b(self.c(tf.nn.relu(self.a(inputs))))


-##embeddings, projection_dim=embed_dim
+# embeddings, projection_dim=embed_dim
class PatchEmbedding(tf.keras.layers.Layer):
def __init__(self, cube_size, patch_size, embed_dim):
super(PatchEmbedding, self).__init__()
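The hunk shows only the start of PatchEmbedding. As a rough orientation, a layer with this constructor typically projects flattened patches to embed_dim and builds a learned positional embedding over (cube_size // patch_size) ** 3 positions, returning both, which matches the return statement visible in the next hunk. The sketch below assumes that general pattern and is not the repository's code.

import tensorflow as tf

class PatchEmbeddingSketch(tf.keras.layers.Layer):
    """Illustrative only: project flattened patches and build positional embeddings."""

    def __init__(self, cube_size, patch_size, embed_dim):
        super().__init__()
        self.num_patches = (cube_size // patch_size) ** 3
        self.projection = tf.keras.layers.Dense(embed_dim)
        self.position_embedding = tf.keras.layers.Embedding(
            input_dim=self.num_patches, output_dim=embed_dim)

    def call(self, patches):
        # patches: (batch, num_patches, voxels_per_patch)
        positions = tf.range(self.num_patches)
        return self.projection(patches), self.position_embedding(positions)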
@@ -184,7 +184,7 @@ def call(self, inputs):
return patches, positionalEmbedding


-##transformerblock
+# transformerblock
class TransformerLayer(tf.keras.layers.Layer):
def __init__(self, embed_dim, num_heads, dropout, cube_size, patch_size):
super(TransformerLayer, self).__init__()
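For context on the transformerblock comment: a transformer encoder layer with this constructor signature usually combines multi-head self-attention and an MLP with layer normalization and residual connections. The sketch below assumes that standard pattern (cube_size and patch_size are omitted) and is not the repository's implementation.

import tensorflow as tf

class TransformerLayerSketch(tf.keras.layers.Layer):
    """Illustrative only: a generic pre-norm transformer encoder block."""

    def __init__(self, embed_dim, num_heads, dropout):
        super().__init__()
        self.norm1 = tf.keras.layers.LayerNormalization(epsilon=1e-6)
        self.attn = tf.keras.layers.MultiHeadAttention(
            num_heads=num_heads, key_dim=embed_dim // num_heads, dropout=dropout)
        self.norm2 = tf.keras.layers.LayerNormalization(epsilon=1e-6)
        self.mlp = tf.keras.Sequential([
            tf.keras.layers.Dense(embed_dim * 4, activation="gelu"),
            tf.keras.layers.Dropout(dropout),
            tf.keras.layers.Dense(embed_dim),
        ])

    def call(self, inputs):
        x = inputs + self.attn(self.norm1(inputs), self.norm1(inputs))
        return x + self.mlp(self.norm2(x))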
