This repository has been archived by the owner on Dec 8, 2021. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 6
/
convert_tf2keras_to_tf1frozen.py
68 lines (57 loc) · 2.54 KB
/
convert_tf2keras_to_tf1frozen.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
import os
import sys
import tensorflow as tf
#from tensorflow import keras
from tensorflow.python.framework.convert_to_constants import convert_variables_to_constants_v2
#import numpy as np
from tensorflow.keras.models import load_model
#from tensorflow.python.tools import optimize_for_inference_lib
from absl import flags, app
#from subprocess import call
# absl flag registry; flag values are populated by app.run() from argv.
FLAGS = flags.FLAGS
#flags.DEFINE_string('myflag', 'Some default string', 'The value of myflag.')
# Path to the input Keras .h5 model file (architecture + weights).
flags.DEFINE_string('input', '', 'input h5 model path with weights')
# Path where the frozen TF1-style .pb graph will be written.
flags.DEFINE_string('output', '', 'output frozen model path')
def main(argv):
    """Convert a TF2 Keras .h5 model into a TF1-style frozen GraphDef (.pb).

    Loads the Keras model given by --input, traces it into a
    ConcreteFunction using the model's declared input spec, folds all
    variables into constants, and writes the resulting frozen graph to
    --output as a binary .pb file. Layer names and the graph's inputs and
    outputs are printed for inspection.

    Args:
        argv: positional command-line arguments from absl.app.run (unused;
            all configuration comes from FLAGS).
    """
    del argv  # unused; signature required by absl.app.run

    model_path = FLAGS.input
    output_path = FLAGS.output
    # Fail fast with a clear usage message instead of letting load_model('')
    # or write_graph fail with a confusing error.
    if not model_path or not output_path:
        sys.exit("Usage: --input <model.h5> --output <frozen_graph.pb>")

    print("Model input ", model_path)
    print("Output path ", output_path)

    model = load_model(model_path)

    # Wrap the Keras model in a tf.function and trace it with the model's
    # first input's shape/dtype to obtain a ConcreteFunction.
    # NOTE(review): assumes a single-input model — confirm for multi-input
    # architectures.
    full_model = tf.function(lambda x: model(x))
    full_model = full_model.get_concrete_function(
        tf.TensorSpec(model.inputs[0].shape, model.inputs[0].dtype))

    # Fold all variables into constants — the TF2 replacement for the old
    # TF1 freeze_graph step.
    frozen_func = convert_variables_to_constants_v2(full_model)

    print(frozen_func.inputs)
    print(frozen_func.outputs)

    print("-" * 60)
    print("Frozen model layers: ")
    for op in frozen_func.graph.get_operations():
        print(op.name)
    print("-" * 60)
    print("Frozen model inputs: ")
    print(frozen_func.inputs)
    print("Frozen model outputs: ")
    print(frozen_func.outputs)

    # Save the frozen graph to disk. write_graph joins logdir and name, so
    # split the user-supplied path explicitly instead of passing logdir=''
    # and relying on os.path.join's empty-component behavior.
    logdir, name = os.path.split(output_path)
    tf.io.write_graph(graph_or_graph_def=frozen_func.graph,
                      logdir=logdir or ".",
                      name=name,
                      as_text=False)

    print("Network converted to frozen model.")
if __name__ == '__main__':
    # absl's app.run parses the registered flags from sys.argv, then
    # invokes main with the remaining positional arguments.
    app.run(main)