Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

onnx2keras #3

Open
wants to merge 6 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Binary file added .DS_Store
Binary file not shown.
2 changes: 1 addition & 1 deletion .idea/misc.xml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion .idea/nn4mc_py.iml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions nn4mc/parser/__init__.py
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
from .hdf5_parser._hdf5parser import HDF5Parser
from .onnx_parser._onnxparser import ONNXParser
from .pytorch_parser._pytorch_parser import PYTorchParser
95 changes: 61 additions & 34 deletions nn4mc/parser/hdf5_parser/_hdf5parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,10 @@
from ._layerbuilder import *
import h5py
import numpy as np
import keras
from nn4mc.parser.hdf5_parser.helpers import bytesToJSON
import distutils.spawn
import os

# This class deals with parsing an HDF5 file from a keras neural network model.
# It will scrape the file and generate a NeuralNetwork object
Expand All @@ -20,6 +23,7 @@ class HDF5Parser(Parser):
'SimpleRNN' : 'SimpleRNNBuilder()',
'GRU' : 'GRUBuilder()',
'LSTM' : 'LSTMBuilder()',

'Input' : 'InputBuilder()',
'InputLayer': 'InputBuilder()',
'Activation' : 'ActivationBuilder()'}
Expand All @@ -46,11 +50,31 @@ def parse(self):
#Close the file
h5file.close()

def onnx_parse(self, h5file):
    """Parse a Keras model object that was produced from an ONNX file.

    h5file: despite the name, this argument is an in-memory Keras model
    (see ONNXParser.parse), not an open h5py File.  The model is
    round-tripped through an HDF5 file on disk so the regular HDF5
    parsing path of this class can be reused unchanged.
    """
    # Rebuild a clean model from the JSON config, then copy the weights.
    # FIX: set_weights() expects a list of numpy arrays; `model.weights`
    # is a list of backend variables, so use get_weights() instead.
    model = keras.models.model_from_json(h5file.to_json())
    model.set_weights(h5file.get_weights())
    # NOTE(review): writes 'model.hdf5' into the current working
    # directory and leaves it behind -- consider a tempfile.
    model.save('model.hdf5', overwrite = True)
    h5file = h5py.File('model.hdf5', 'r')

    # Parse model configuration (i.e. metadata)
    self.parseModelConfig(h5file)

    # Parse weights and biases.  onnx_parser=True makes parseWeights()
    # skip its HDF5-only extraction loop (see its trailing `else: pass`).
    self.parseWeights(h5file, True)

    # Close the file
    h5file.close()

#Parses all of the layer metadata
#NOTE:
def parseModelConfig(self, h5file):

# with h5py.File(self.file_name, 'r') as h5file: #Open hdf5 file
#if not isinstance(h5file, keras.engine.functional.Functional):
configAttr = h5file['/'].attrs['model_config'] #Gets all metadata
#else:
# configAttr = h5file.to_json()

configJSON = bytesToJSON(configAttr)

self.parse_nn_input(configJSON['config'])
Expand Down Expand Up @@ -79,41 +103,44 @@ def parseModelConfig(self, h5file):

#Parses all of the weights
#NOTE:
def parseWeights(self, h5file):
def parseWeights(self, h5file, onnx_parser = False):
# Parse per-layer weights/biases out of the open HDF5 file and attach
# them to the layer objects, then recompute every layer's output shape.
# onnx_parser: when True (set by onnx_parse()), the extraction loop is
# skipped entirely (see the trailing `else: pass`).
# NOTE(review): this span is rendered from a flattened diff -- it shows
# the pre-change loop followed by the post-change copy of the same loop
# gated on `if (not onnx_parser)`; confirm against the merged source.
#if not isinstance(h5file, keras.engine.functional.Functional):
weightGroup = h5file['model_weights'] #Open weight group
# NOTE(sarahaguasvivas) here, the order matters,
# therefore, using different list
for layer in self.nn.iterate_layer_list():
id = layer.identifier
# Pooling/flatten/input layers carry no trainable parameters.
if id in weightGroup.keys() and 'max_pooling1d' not in id \
and 'max_pooling2d' not in id and 'flatten' not in id and \
'input' not in id:
# NOTE(sarahaguasvivas): kernel/weight assignment
gru_keys = [k for k, v in weightGroup[id][id].items() if 'gru_cell' in k]
if len(gru_keys) > 0:
weight = np.array(weightGroup[id][id][gru_keys[0]]['kernel:0'])
else:
weight = np.array(weightGroup[id][id]['kernel:0'][()])
# NOTE(sarahaguasvivas): bias
if len(gru_keys) > 0:
bias = np.array(weightGroup[id][id][gru_keys[0]]['bias:0'])
else:
bias = np.array(weightGroup[id][id]['bias:0'][()])
# NOTE(sarahaguasvivas): recurrent weights (GRU cells only)
if len(gru_keys) > 0:
rec_weight = np.array(weightGroup[id][id][gru_keys[0]]['recurrent_kernel:0'][()])
else:
rec_weight = None
layer.setParameters('weight', (id + '_W', weight))
layer.setParameters('bias', (id + '_b', bias))
layer.setParameters('weight_rec', (id + '_Wrec', rec_weight))

# NOTE(sarahaguasvivas): calculating output shapes
input_shape = self.nn_input_size
for layer in self.nn.iterate_layer_list():
if "input" not in layer.identifier:
input_shape = layer.computeOutShape(input_shape)
print(layer.getParameters())
if (not onnx_parser):
#if not isinstance(h5file, keras.engine.functional.Functional):
for layer in self.nn.iterate_layer_list():
id = layer.identifier
if id in weightGroup.keys() and 'max_pooling1d' not in id \
and 'max_pooling2d' not in id and 'flatten' not in id and \
'input' not in id:
# NOTE(sarahaguasvivas): kernel/weight assignment
gru_keys = [k for k, v in weightGroup[id][id].items() if 'gru_cell' in k]
if len(gru_keys) > 0:
weight = np.array(weightGroup[id][id][gru_keys[0]]['kernel:0'])
else:
weight = np.array(weightGroup[id][id]['kernel:0'][()])
# NOTE(sarahaguasvivas): bias
if len(gru_keys) > 0:
bias = np.array(weightGroup[id][id][gru_keys[0]]['bias:0'])
else:
bias = np.array(weightGroup[id][id]['bias:0'][()])
# NOTE(sarahaguasvivas): recurrent weights (GRU cells only)
if len(gru_keys) > 0:
rec_weight = np.array(weightGroup[id][id][gru_keys[0]]['recurrent_kernel:0'][()])
else:
rec_weight = None
layer.setParameters('weight', (id + '_W', weight))
layer.setParameters('bias', (id + '_b', bias))
layer.setParameters('weight_rec', (id + '_Wrec', rec_weight))

# NOTE(sarahaguasvivas): calculating output shapes
input_shape = self.nn_input_size
for layer in self.nn.iterate_layer_list():
if "input" not in layer.identifier:
input_shape = layer.computeOutShape(input_shape)
print(layer.getParameters())
else:
# ONNX path: weights were already attached elsewhere, nothing to do.
pass

#parses model for input size
def parse_nn_input(self, model_config : dict):
Expand Down
1 change: 1 addition & 0 deletions nn4mc/parser/hdf5_parser/_layerbuilder.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,7 @@ def build_layer(self, json_obj, id, layer_type):

return new_layer


class FlattenBuilder(LayerBuilder):
def build_layer(self, json_obj, id, layer_type):
new_layer = Flatten(id, layer_type)
Expand Down
129 changes: 129 additions & 0 deletions nn4mc/parser/onnx_parser/_onnxparser.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,129 @@
from nn4mc.parser._parser import Parser
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

TODO(sarahaguasvivas): Use test_translator to see if nn4mc doesn't crash with these models:

https://github.com/correlllab/nn4mc_py/blob/master/tests/test_translator/test_full.py

Create a new one called test_full_onnx.py

from nn4mc.datastructures import NeuralNetwork
# from ._layerbuilder import *
import h5py
import onnx
import keras
# from onnx2keras import onnx_to_keras
# from nn4mc.parser.onnx_parser.onnx_helpers import HDF5Parser
import numpy as np
from nn4mc.parser.onnx_parser.onnx_helpers import onnx2keras
from nn4mc.parser.hdf5_parser._hdf5parser import HDF5Parser
from tensorflow import keras

class ONNXParser(Parser):
    """Parser for .onnx model files.

    Strategy: convert the ONNX graph into an equivalent Keras model
    (onnx_helpers.onnx2keras), then reuse the existing Keras/HDF5
    parsing pipeline (HDF5Parser.onnx_parse) to build the NeuralNetwork.
    """

    def __init__(self, file):
        self.file = file                # path to the .onnx model on disk
        self.nn = NeuralNetwork()       # populated by parse()
        self.nn_input_size = None       # model input shape, set while parsing

    def parse(self):
        """Build self.nn from the ONNX file by delegating to HDF5Parser."""
        h5format = onnx2keras(self.file)

        h5parser = HDF5Parser(h5format)
        h5parser.file = self.file
        # onnx_parse() round-trips the Keras model through an HDF5 file on
        # disk and parses both the layer configuration and the weights.
        h5parser.onnx_parse(h5format)

        # Adopt the fully built network (and its input size) from the
        # delegate.  FIX: the original additionally called
        # parseModelConfig()/parseWeights() on the raw onnx ModelProto,
        # which does not support h5py-style subscripting and raised a
        # TypeError before parse() could finish.
        self.nn = h5parser.nn
        self.nn_input_size = h5parser.nn_input_size

    def parseModelConfig(self, h5file):
        """Parse layer metadata from an open Keras HDF5 file.

        NOTE(review): this is a verbatim copy of the HDF5Parser version
        and only works on an h5py File (it reads h5file['/'].attrs), not
        on an onnx ModelProto.  It also relies on bytesToJSON / Input /
        self.builder_map, which this module does not define or import --
        it looks like dead code kept for interface parity and is no
        longer called by parse(); verify before removing.
        """
        configAttr = h5file['/'].attrs['model_config']  # all model metadata
        configJSON = bytesToJSON(configAttr)

        self.parse_nn_input(configJSON['config'])

        # Prepend a synthetic input layer so every real layer has a parent.
        # NOTE: Determine if this is really necessary.
        last_layer = Input('input_1','input')
        self.nn.addLayer(last_layer)

        # NOTE: Could check to see if its sequential here
        for model_layer in configJSON['config']['layers']:
            type_ = model_layer['class_name']
            name = model_layer['config']['name']

            if type_ in self.builder_map.keys():
                builder = eval(self.builder_map[type_])

                # Build a layer object from its JSON metadata.
                layer = builder.build_layer(model_layer['config'], name.lower(), type_.lower())

                self.nn.addLayer(layer)  # Add Layer to neural network
                self.nn.addEdge(last_layer, layer)

                last_layer = layer

    def _parseONNX(self):
        # FIX: the original returned `self.parse.h5format` -- an attribute
        # looked up on a bound method -- which always raised
        # AttributeError.  Return the converted Keras model instead.
        return onnx2keras(self.file)

    def parseWeights(self, h5file, _parseONNX = True):
        """Attach weights from the ONNX graph to the parsed layers.

        h5file: an onnx ModelProto (the parameter name is kept for
        interface parity with HDF5Parser.parseWeights).

        NOTE(review): graph.initializer is a list of TensorProto
        messages, so `id in weightGroup` compares a string against
        protobuf objects and never matches -- the extraction loop is
        effectively dead and only the output-shape pass at the end runs.
        Kept as-is pending a real ONNX weight-lookup implementation.
        """
        weightGroup = h5file.graph.initializer

        if (not _parseONNX):
            pass
        else:
            for layer in self.nn.iterate_layer_list():
                id = layer.identifier

                # Pooling/flatten/input layers carry no trainable weights.
                if id in weightGroup and 'max_pooling1d' not in id \
                    and 'max_pooling2d' not in id and 'flatten' not in id and \
                    'input' not in id:

                    gru_keys = [k for k, v in weightGroup[id][id].items() if 'gru_cell' in k]
                    # kernel/weight assignment
                    if len(gru_keys) > 0:
                        weight = np.array(weightGroup[id][id][gru_keys[0]]['kernel:0'])
                    else:
                        weight = np.array(weightGroup[id][id]['kernel:0'][()])
                    # bias
                    if len(gru_keys) > 0:
                        bias = np.array(weightGroup[id][id][gru_keys[0]]['bias:0'])
                    else:
                        bias = np.array(weightGroup[id][id]['bias:0'][()])
                    # recurrent weights (GRU cells only)
                    if len(gru_keys) > 0:
                        rec_weight = np.array(weightGroup[id][id][gru_keys[0]]['recurrent_kernel:0'][()])
                    else:
                        rec_weight = None

                    layer.setParameters('weight', (id + '_W', weight))
                    layer.setParameters('bias', (id + '_b', bias))
                    layer.setParameters('weight_rec', (id + '_Wrec', rec_weight))

            # Recompute every layer's output shape from the model input size.
            input_shape = self.nn_input_size
            for layer in self.nn.iterate_layer_list():
                if "input" not in layer.identifier:
                    input_shape = layer.computeOutShape(input_shape)
                    print(layer.getParameters())

    def parse_nn_input(self, model_config : dict):
        """
        INPUT: model_config is the json object dictionary
        OUTPUT: sets self.nn_input_size to the model input shape with the
        batch dimension stripped.
        """
        if model_config.get('build_input_shape'):
            self.nn_input_size = model_config['build_input_shape'][1:]
        # FIX: the original wrote layers[0].get('config','batch_input_shape'),
        # i.e. dict.get(key, default) -- always truthy -- and then stored the
        # entire 'config' dict as the input size instead of the shape.
        batch_shape = model_config['layers'][0].get('config', {}).get('batch_input_shape')
        if batch_shape:
            self.nn_input_size = batch_shape[1:]
25 changes: 25 additions & 0 deletions nn4mc/parser/onnx_parser/onnx_helpers.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import onnx
from onnx2keras import onnx_to_keras

# /*------------------------------------------------- onnx2keras -----
# | Function: onnx2keras
# |
# | Purpose: CONVERTING AN ONNX MODEL TO ITS KERAS EQUIVALENT
# |
# | Parameters: path to a .onnx file
# |
# | Returns: Keras model
# *-------------------------------------------------------------------*/


def onnx2keras(file):
    """Convert an ONNX model file to an equivalent Keras model.

    Parameters:
        file (IN) -- path to a .onnx model file on disk.

    Returns:
        the converted Keras model produced by onnx2keras.onnx_to_keras.
    """
    # Load ONNX model
    onnx_model = onnx.load(file)

    # Collect the names of all graph inputs.
    input_names = [node.name for node in onnx_model.graph.input]

    # Call the converter.  FIX: pass every input name instead of only the
    # first, so multi-input models convert correctly; for single-input
    # models this is identical to the previous behavior.
    k_model = onnx_to_keras(onnx_model, input_names)

    return k_model
4 changes: 4 additions & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
numpy
h5py
onnx2keras
onnx
Binary file added tests/data/resnet18-v2-7.onnx
Binary file not shown.
Empty file.
Binary file added tests/test_translator/functional.hdf5
Binary file not shown.
Binary file added tests/test_translator/model.hdf5
Binary file not shown.
Binary file added tests/test_translator/parser_weight_save.h5
Binary file not shown.
27 changes: 27 additions & 0 deletions tests/test_translator/test_full_onnx.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
import nn4mc.parser as nnPr
import nn4mc.datastructures as nnDs
import nn4mc.generator as nnGn
import unittest
import os

class TestTranslator(unittest.TestCase):
    """End-to-end check: parse an ONNX model and generate code from it."""

    def setUp(self):
        pass

    def test_file(self):
        # FIX: resolve all paths relative to this file (the output path
        # already did this) so the test does not depend on the working
        # directory pytest/unittest is launched from.
        here = os.path.dirname(os.path.abspath(__file__))
        model_path = os.path.join(here, '..', 'data', 'resnet18-v2-7.onnx')

        p = nnPr.ONNXParser(model_path)
        p.parse()

        # Create the output directory if needed (idempotent).
        path2 = os.path.join(here, 'output')
        os.makedirs(path2, exist_ok=True)

        generator = nnGn.Generator(p.nn)
        generator.generate(path2)

# Allow running this test module directly: python test_full_onnx.py
if __name__=='__main__':
unittest.main()
Binary file added tests/test_translator/weights.h5
Binary file not shown.