Skip to content

Commit

Permalink
Merge branch 'main' into oneapi_separableconv
Browse files Browse the repository at this point in the history
  • Loading branch information
laurilaatu authored Dec 9, 2024
2 parents 6de4043 + f377fe0 commit 326b188
Show file tree
Hide file tree
Showing 2 changed files with 28 additions and 14 deletions.
20 changes: 12 additions & 8 deletions hls4ml/converters/pytorch/pooling.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,15 +90,19 @@ def parse_pooling_layer(operation, layer_name, input_names, input_shapes, node,
layer['stride_height'] = node.kwargs['stride'][0]
layer['stride_width'] = node.kwargs['stride'][1]
else:
layer['stride_height'] = node.kwargs['stride']
layer['stride_width'] = node.kwargs['stride']
if type(node.kwargs['kernel_size']) is tuple:
layer['pool_height'] = node.kwargs['kernel_size'][0]
layer['pool_width'] = node.kwargs['kernel_size'][1]
if node.kwargs['stride'] is None:
# if stride is not set, it defaults to the kernel size
layer['stride_height'] = node.args[1]
layer['stride_width'] = node.args[1]
else:
layer['stride_height'] = node.kwargs['stride']
layer['stride_width'] = node.kwargs['stride']
if type(node.args[1]) is tuple:
layer['pool_height'] = node.args[1][0]
layer['pool_width'] = node.args[1][1]
else:
layer['pool_height'] = node.kwargs['kernel_size']
layer['pool_width'] = node.kwargs['kernel_size']

layer['pool_height'] = node.args[1]
layer['pool_width'] = node.args[1]
if type(node.kwargs['padding']) is tuple:
padding = node.kwargs['padding']
else:
Expand Down
22 changes: 16 additions & 6 deletions hls4ml/converters/pytorch/reshape.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,13 +93,23 @@ def parse_flatten_layer(operation, layer_name, input_names, input_shapes, node,
layer['class_name'] = 'Reshape'
layer['name'] = layer_name
layer['inputs'] = input_names

start_dim = class_object.start_dim
end_dim = class_object.end_dim
if end_dim + 1 == 0 or end_dim + 1 > len(input_shapes[0]):
end_dim = len(input_shapes[0])
if node.op == 'call_module':
start_dim = class_object.start_dim
end_dim = class_object.end_dim
if end_dim + 1 == 0 or end_dim + 1 > len(input_shapes[0]):
end_dim = len(input_shapes[0])
else:
end_dim = end_dim + 1
else:
end_dim = end_dim + 1
start_dim = node.args[1]
if len(node.args) == 3:
end_dim = node.args[2]
else:
end_dim = -1
if end_dim + 1 == 0 or end_dim + 1 > len(input_shapes[0]):
end_dim = len(input_shapes[0])
else:
end_dim = end_dim + 1

layer['target_shape'] = (
input_shapes[0][0:start_dim] + [np.prod(input_shapes[0][start_dim:end_dim])] + input_shapes[0][end_dim:]
Expand Down

0 comments on commit 326b188

Please sign in to comment.