Skip to content

Commit

Permalink
Merge pull request #43 from NREL/bnb/concat_adder_init_hot_fix
Browse files Browse the repository at this point in the history
missed super().__init__() call in concat/adder. Setting ._name instead.
  • Loading branch information
bnb32 authored Oct 17, 2023
2 parents 9e88552 + 6855fd6 commit 17dde4a
Showing 1 changed file with 3 additions and 4 deletions.
7 changes: 3 additions & 4 deletions phygnn/layers/custom_layers.py
Original file line number Diff line number Diff line change
Expand Up @@ -478,8 +478,7 @@ def __init__(self, name):
Unique string identifier of the skip connection. The skip endpoint
should have the same name.
"""
super().__init__()
self._name = name
super().__init__(name=name)
self._cache = None

def call(self, x):
Expand Down Expand Up @@ -610,7 +609,7 @@ def __init__(self, name=None):
Unique str identifier of the adder layer. Usually the name of the
hi-resolution feature used in the addition.
"""
self.name = name
super().__init__(name=name)

def call(self, x, hi_res_adder):
"""Adds hi-resolution data to the input tensor x in the middle of a
Expand Down Expand Up @@ -645,7 +644,7 @@ def __init__(self, name=None):
Unique str identifier for the concat layer. Usually the name of the
hi-resolution feature used in the concatenation.
"""
self.name = name
super().__init__(name=name)

def call(self, x, hi_res_feature):
"""Concatenates a hi-resolution feature to the input tensor x in the
Expand Down

0 comments on commit 17dde4a

Please sign in to comment.