additional review updates
pkgoogle committed Dec 3, 2024
1 parent e1a597d commit 5ed664f
Showing 5 changed files with 84 additions and 78 deletions.
22 changes: 12 additions & 10 deletions keras_hub/src/models/efficientnet/cba.py
@@ -2,15 +2,6 @@

 BN_AXIS = 3

-CONV_KERNEL_INITIALIZER = {
-    "class_name": "VarianceScaling",
-    "config": {
-        "scale": 2.0,
-        "mode": "fan_out",
-        "distribution": "truncated_normal",
-    },
-}
-

 class CBABlock(keras.layers.Layer):
     """
@@ -75,7 +66,7 @@ def __init__(
             filters=self.output_filters,
             kernel_size=kernel_size,
             strides=strides,
-            kernel_initializer=CONV_KERNEL_INITIALIZER,
+            kernel_initializer=self._conv_kernel_initializer(),
             padding="valid",
             data_format=data_format,
             use_bias=False,
@@ -98,6 +89,17 @@ def __init__(
             name=self.name + "drop",
         )

+    def _conv_kernel_initializer(
+        self,
+        scale=2.0,
+        mode="fan_out",
+        distribution="truncated_normal",
+        seed=None,
+    ):
+        return keras.initializers.VarianceScaling(
+            scale=scale, mode=mode, distribution=distribution, seed=seed
+        )
+
     def build(self, input_shape):
         if self.name is None:
             self.name = keras.backend.get_uid("block0")
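The change above replaces the module-level CONV_KERNEL_INITIALIZER dict with a per-instance helper; the same refactor is applied to fusedmbconv.py and mbconv.py below. A minimal standalone sketch of the pattern, assuming only public Keras APIs (the free-function name conv_kernel_initializer and the Conv2D arguments are illustrative, not part of the commit):

import keras

# Standalone counterpart of the `_conv_kernel_initializer` method added in
# this commit: each call returns a fresh VarianceScaling instance, so every
# conv layer owns its own initializer object rather than sharing one dict.
def conv_kernel_initializer(
    scale=2.0, mode="fan_out", distribution="truncated_normal", seed=None
):
    return keras.initializers.VarianceScaling(
        scale=scale, mode=mode, distribution=distribution, seed=seed
    )

# Usage mirroring the diff: hand the initializer instance to a conv layer.
conv = keras.layers.Conv2D(
    filters=64,
    kernel_size=3,
    strides=1,
    padding="valid",
    kernel_initializer=conv_kernel_initializer(),
    use_bias=False,
)

Keras accepts either the old serialized-dict form or an initializer instance for kernel_initializer; the helper mainly makes the configuration explicit and lets a seed be threaded through when reproducible kernels are wanted.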
28 changes: 14 additions & 14 deletions keras_hub/src/models/efficientnet/efficientnet_backbone.py
@@ -27,10 +27,10 @@ class EfficientNetBackbone(FeaturePyramidBackbone):
         (https://arxiv.org/abs/2104.00298) (ICML 2021)

     Args:
-        stackwise_width_coefficient: list[float] or float, scaling coefficient
+        stackwise_width_coefficients: list[float], scaling coefficient
             for network width. If single float, it is assumed that this value
             applies to all stacks.
-        stackwise_depth_coefficient: list[float] or float, scaling coefficient
+        stackwise_depth_coefficients: list[float], scaling coefficient
             for network depth. If single float, it is assumed that this value
             applies to all stacks.
         stackwise_kernel_sizes: list of ints, the kernel sizes used for each
@@ -106,8 +106,8 @@ class EfficientNetBackbone(FeaturePyramidBackbone):
     def __init__(
         self,
         *,
-        stackwise_width_coefficient=None,
-        stackwise_depth_coefficient=None,
+        stackwise_width_coefficients=None,
+        stackwise_depth_coefficients=None,
         stackwise_kernel_sizes,
         stackwise_num_repeats,
         stackwise_input_filters,
@@ -136,11 +136,11 @@ def __init__(
     ):
         num_stacks = len(stackwise_kernel_sizes)
         if "depth_coefficient" in kwargs:
-            stackwise_depth_coefficient = [
+            stackwise_depth_coefficients = [
                 kwargs.pop("depth_coefficient")
             ] * num_stacks
         if "width_coefficient" in kwargs:
-            stackwise_width_coefficient = [
+            stackwise_width_coefficients = [
                 kwargs.pop("width_coefficient")
             ] * num_stacks

@@ -156,7 +156,7 @@ def __init__(
         # Build stem
         stem_filters = round_filters(
             filters=stackwise_input_filters[0],
-            width_coefficient=stackwise_width_coefficient[0],
+            width_coefficient=stackwise_width_coefficients[0],
             min_depth=min_depth,
             depth_divisor=depth_divisor,
             use_depth_divisor_as_min_depth=use_depth_divisor_as_min_depth,
@@ -194,8 +194,8 @@ def __init__(
             output_filters = stackwise_output_filters[i]
             force_input_filters = stackwise_force_input_filters[i]
             nores = stackwise_nores_option[i]
-            stack_width_coefficient = stackwise_width_coefficient[i]
-            stack_depth_coefficient = stackwise_depth_coefficient[i]
+            stack_width_coefficient = stackwise_width_coefficients[i]
+            stack_depth_coefficient = stackwise_depth_coefficients[i]

             # Update block input and output filters based on depth multiplier.
             input_filters = round_filters(
@@ -297,7 +297,7 @@ def __init__(
         # Build top
         top_filters = round_filters(
             filters=num_features,
-            width_coefficient=stackwise_width_coefficient[-1],
+            width_coefficient=stackwise_width_coefficients[-1],
             min_depth=min_depth,
             depth_divisor=depth_divisor,
             use_depth_divisor_as_min_depth=use_depth_divisor_as_min_depth,
@@ -330,8 +330,8 @@ def __init__(
         super().__init__(inputs=image_input, outputs=x, **kwargs)

         # === Config ===
-        self.stackwise_width_coefficient = stackwise_width_coefficient
-        self.stackwise_depth_coefficient = stackwise_depth_coefficient
+        self.stackwise_width_coefficients = stackwise_width_coefficients
+        self.stackwise_depth_coefficients = stackwise_depth_coefficients
         self.dropout = dropout
         self.depth_divisor = depth_divisor
         self.min_depth = min_depth
@@ -361,8 +361,8 @@ def get_config(self):
         config = super().get_config()
         config.update(
             {
-                "stackwise_width_coefficient": self.stackwise_width_coefficient,
-                "stackwise_depth_coefficient": self.stackwise_depth_coefficient,
+                "stackwise_width_coefficients": self.stackwise_width_coefficients,
+                "stackwise_depth_coefficients": self.stackwise_depth_coefficients,
                 "dropout": self.dropout,
                 "depth_divisor": self.depth_divisor,
                 "min_depth": self.min_depth,
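With the rename above, the backbone takes one width/depth coefficient per stack, and the shim at the top of __init__ expands the legacy scalar depth_coefficient / width_coefficient kwargs into per-stack lists. A small standalone sketch of that expansion, using plain Python only (expand_coefficient is an illustrative helper, not part of the commit):

# Scalar-to-list expansion mirroring the kwargs shim in __init__ above.
def expand_coefficient(value, num_stacks):
    # Return one coefficient per stack, whether given a scalar or a list.
    if isinstance(value, (int, float)):
        return [float(value)] * num_stacks
    return list(value)

num_stacks = 7  # e.g. a seven-stack EfficientNet-style configuration
stackwise_width_coefficients = expand_coefficient(1.0, num_stacks)
stackwise_depth_coefficients = expand_coefficient(1.1, num_stacks)
print(stackwise_width_coefficients)  # [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]

The renamed attributes also flow through get_config, so serialized configs now carry the plural stackwise_width_coefficients / stackwise_depth_coefficients keys.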
28 changes: 15 additions & 13 deletions keras_hub/src/models/efficientnet/fusedmbconv.py
@@ -2,15 +2,6 @@

 BN_AXIS = 3

-CONV_KERNEL_INITIALIZER = {
-    "class_name": "VarianceScaling",
-    "config": {
-        "scale": 2.0,
-        "mode": "fan_out",
-        "distribution": "truncated_normal",
-    },
-}
-

 class FusedMBConvBlock(keras.layers.Layer):
     """Implementation of the FusedMBConv block
@@ -116,7 +107,7 @@ def __init__(
             filters=self.filters,
             kernel_size=kernel_size,
             strides=strides,
-            kernel_initializer=CONV_KERNEL_INITIALIZER,
+            kernel_initializer=self._conv_kernel_initializer(),
             padding="valid",
             data_format=data_format,
             use_bias=False,
@@ -138,7 +129,7 @@ def __init__(
             padding="same",
             data_format=data_format,
             activation=self.activation,
-            kernel_initializer=CONV_KERNEL_INITIALIZER,
+            kernel_initializer=self._conv_kernel_initializer(),
             name=self.name + "se_reduce",
         )

@@ -148,7 +139,7 @@ def __init__(
             padding="same",
             data_format=data_format,
             activation="sigmoid",
-            kernel_initializer=CONV_KERNEL_INITIALIZER,
+            kernel_initializer=self._conv_kernel_initializer(),
             name=self.name + "se_expand",
         )

@@ -161,7 +152,7 @@ def __init__(
             filters=self.output_filters,
             kernel_size=projection_kernel_size,
             strides=1,
-            kernel_initializer=CONV_KERNEL_INITIALIZER,
+            kernel_initializer=self._conv_kernel_initializer(),
             padding="valid",
             data_format=data_format,
             use_bias=False,
@@ -187,6 +178,17 @@ def __init__(
             name=self.name + "drop",
         )

+    def _conv_kernel_initializer(
+        self,
+        scale=2.0,
+        mode="fan_out",
+        distribution="truncated_normal",
+        seed=None,
+    ):
+        return keras.initializers.VarianceScaling(
+            scale=scale, mode=mode, distribution=distribution, seed=seed
+        )
+
     def build(self, input_shape):
         if self.name is None:
             self.name = keras.backend.get_uid("block0")
30 changes: 16 additions & 14 deletions keras_hub/src/models/efficientnet/mbconv.py
@@ -2,15 +2,6 @@

 BN_AXIS = 3

-CONV_KERNEL_INITIALIZER = {
-    "class_name": "VarianceScaling",
-    "config": {
-        "scale": 2.0,
-        "mode": "fan_out",
-        "distribution": "truncated_normal",
-    },
-}
-

 class MBConvBlock(keras.layers.Layer):
     def __init__(
@@ -99,7 +90,7 @@ def __init__(
             filters=self.filters,
             kernel_size=1,
             strides=1,
-            kernel_initializer=CONV_KERNEL_INITIALIZER,
+            kernel_initializer=self._conv_kernel_initializer(),
             padding="same",
             data_format=data_format,
             use_bias=False,
@@ -117,7 +108,7 @@ def __init__(
         self.depthwise = keras.layers.DepthwiseConv2D(
             kernel_size=self.kernel_size,
             strides=self.strides,
-            depthwise_initializer=CONV_KERNEL_INITIALIZER,
+            depthwise_initializer=self._conv_kernel_initializer(),
             padding="same",
             data_format=data_format,
             use_bias=False,
@@ -137,7 +128,7 @@ def __init__(
             padding="same",
             data_format=data_format,
             activation=self.activation,
-            kernel_initializer=CONV_KERNEL_INITIALIZER,
+            kernel_initializer=self._conv_kernel_initializer(),
             name=self.name + "se_reduce",
         )

@@ -147,7 +138,7 @@ def __init__(
             padding="same",
             data_format=data_format,
             activation="sigmoid",
-            kernel_initializer=CONV_KERNEL_INITIALIZER,
+            kernel_initializer=self._conv_kernel_initializer(),
             name=self.name + "se_expand",
         )

@@ -161,7 +152,7 @@ def __init__(
             filters=self.output_filters,
             kernel_size=projection_kernel_size,
             strides=1,
-            kernel_initializer=CONV_KERNEL_INITIALIZER,
+            kernel_initializer=self._conv_kernel_initializer(),
             padding="valid",
             data_format=data_format,
             use_bias=False,
@@ -182,6 +173,17 @@ def __init__(
             name=self.name + "drop",
         )

+    def _conv_kernel_initializer(
+        self,
+        scale=2.0,
+        mode="fan_out",
+        distribution="truncated_normal",
+        seed=None,
+    ):
+        return keras.initializers.VarianceScaling(
+            scale=scale, mode=mode, distribution=distribution, seed=seed
+        )
+
     def build(self, input_shape):
         if self.name is None:
             self.name = keras.backend.get_uid("block0")
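In mbconv.py the same helper also feeds DepthwiseConv2D through depthwise_initializer, and unlike the removed module-level dict it can carry a per-call seed. A short sketch of that, assuming only public Keras APIs (the seed value and kernel shape are illustrative):

import keras

# A seeded VarianceScaling matching the defaults of the new helper.
init = keras.initializers.VarianceScaling(
    scale=2.0, mode="fan_out", distribution="truncated_normal", seed=1337
)

# Keras initializers are callable with a shape: sample a 3x3 depthwise
# kernel over 32 input channels with depth multiplier 1.
depthwise_kernel = init(shape=(3, 3, 32, 1))
print(depthwise_kernel.shape)  # (3, 3, 32, 1)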