diff --git a/build/bnn-pynq/build.py b/build/bnn-pynq/build.py
index b4c5929..b6001cf 100644
--- a/build/bnn-pynq/build.py
+++ b/build/bnn-pynq/build.py
@@ -44,6 +44,8 @@
     "cnv-w2a2",
 ]
 
+verif_en = os.getenv("VERIFICATION_EN", "0")
+
 # which platforms to build the networks for
 zynq_platforms = ["Pynq-Z1", "Ultra96", "ZCU104"]
 alveo_platforms = ["U250"]
@@ -86,15 +88,33 @@ def platform_to_shell(platform):
             board=platform_name,
             shell_flow_type=shell_flow_type,
             vitis_platform=vitis_platform,
-            generate_outputs=[build_cfg.DataflowOutputType.BITFILE],
+            generate_outputs=[
+                build_cfg.DataflowOutputType.BITFILE,
+                build_cfg.DataflowOutputType.STITCHED_IP,
+            ],
             save_intermediate_models=True,
             default_swg_exception=True,
             specialize_layers_config_file="specialize_layers_config/%s_specialize_layers.json"
             % model_name,
         )
         model_file = "models/%s.onnx" % model_name
-        # launch FINN compiler to build
-        build.build_dataflow_cfg(model_file, cfg)
+
+        if verif_en == "1":
+            # Build the model with verification
+            import sys
+
+            sys.path.append(os.path.abspath(os.getenv("FINN_EXAMPLES_ROOT") + "/ci/"))
+            from verification_funcs import init_verif, verify_build_output
+
+            cfg.verify_steps, cfg.verify_input_npy, cfg.verify_expected_output_npy = init_verif(
+                model_name
+            )
+            build.build_dataflow_cfg(model_file, cfg)
+            verify_build_output(cfg, model_name)
+        else:
+            # Build the model without verification
+            build.build_dataflow_cfg(model_file, cfg)
+
         # copy bitfiles into release dir if found
         bitfile_gen_dir = cfg.output_dir + "/bitfile"
         files_to_check_and_copy = [
diff --git a/build/cybersecurity-mlp/build.py b/build/cybersecurity-mlp/build.py
index f97058f..db6c0ca 100644
--- a/build/cybersecurity-mlp/build.py
+++ b/build/cybersecurity-mlp/build.py
@@ -32,6 +32,11 @@
 import os
 import shutil
 
+# Define model name
+model_name = "unsw_nb15-mlp-w2a2"
+
+verif_en = os.getenv("VERIFICATION_EN", "0")
+
 # Which platforms to build the networks for
 zynq_platforms = ["Pynq-Z1", "Ultra96", "ZCU104"]
 alveo_platforms = []
@@ -50,9 +55,6 @@ def platform_to_shell(platform):
         raise Exception("Unknown platform, can't determine ShellFlowType")
 
 
-# Define model name
-model_name = "unsw_nb15-mlp-w2a2"
-
 # Create a release dir, used for finn-examples release packaging
 os.makedirs("release", exist_ok=True)
 
@@ -85,13 +87,28 @@ def platform_to_shell(platform):
             build_cfg.DataflowOutputType.ESTIMATE_REPORTS,
             build_cfg.DataflowOutputType.BITFILE,
             build_cfg.DataflowOutputType.DEPLOYMENT_PACKAGE,
+            build_cfg.DataflowOutputType.STITCHED_IP,
         ],
-        save_intermediate_models=True,
     )
     model = "models/%s.onnx" % model_name
     # Launch FINN compiler to generate bitfile
-    build.build_dataflow_cfg(model, cfg)
+    if verif_en == "1":
+        # Build the model with verification
+        import sys
+
+        sys.path.append(os.path.abspath(os.getenv("FINN_EXAMPLES_ROOT") + "/ci/"))
+        from verification_funcs import init_verif, verify_build_output
+
+        cfg.verify_steps, cfg.verify_input_npy, cfg.verify_expected_output_npy = init_verif(
+            model_name
+        )
+        build.build_dataflow_cfg(model, cfg)
+        verify_build_output(cfg, model_name)
+    else:
+        # Build the model without verification
+        build.build_dataflow_cfg(model, cfg)
+
     # Copy bitfiles into release dir if found
     bitfile_gen_dir = cfg.output_dir + "/bitfile"
     filtes_to_check_and_copy = ["finn-accel.bit", "finn-accel.hwh", "finn-accel.xclbin"]
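The same VERIFICATION_EN toggle is inlined into every build script touched by this patch. For reference, a minimal sketch of the shared pattern; maybe_build_with_verification is a hypothetical name (the scripts inline this logic rather than sharing a helper):

import os
import sys


def maybe_build_with_verification(build, cfg, model_file, model_name):
    # Gate verification on the VERIFICATION_EN environment variable ("1" = on)
    if os.getenv("VERIFICATION_EN", "0") == "1":
        # Make ci/verification_funcs.py (added later in this patch) importable
        sys.path.append(os.path.abspath(os.getenv("FINN_EXAMPLES_ROOT") + "/ci/"))
        from verification_funcs import init_verif, verify_build_output

        # init_verif returns the step names plus golden input/output .npy paths
        cfg.verify_steps, cfg.verify_input_npy, cfg.verify_expected_output_npy = init_verif(
            model_name
        )
        build.build_dataflow_cfg(model_file, cfg)
        verify_build_output(cfg, model_name)
    else:
        build.build_dataflow_cfg(model_file, cfg)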
diff --git a/build/gtsrb/build.py b/build/gtsrb/build.py
index e3cacac..668c266 100644
--- a/build/gtsrb/build.py
+++ b/build/gtsrb/build.py
@@ -28,20 +28,25 @@
 import finn.builder.build_dataflow as build
 import finn.builder.build_dataflow_config as build_cfg
+from finn.util.basic import alveo_default_platform
 from finn.builder.build_dataflow_config import default_build_dataflow_steps
+from qonnx.transformation.insert_topk import InsertTopK
 from qonnx.core.datatype import DataType
 import os
 import shutil
 import numpy as np
 from onnx import helper as oh
 
-models = [
-    "cnv_1w1a_gtsrb",
-]
+
+model_name = "cnv_1w1a_gtsrb"
+model_file = "models/%s.onnx" % model_name
+
+verif_en = os.getenv("VERIFICATION_EN", "0")
 
 # which platforms to build the networks for
 zynq_platforms = ["Pynq-Z1"]
-platforms_to_build = zynq_platforms
+alveo_platforms = []
+platforms_to_build = zynq_platforms + alveo_platforms
 
 
 def custom_step_add_preproc(model, cfg):
@@ -63,16 +68,27 @@ def custom_step_add_preproc(model, cfg):
     model.graph.node[1].input[0] = new_in_name
     # set input dtype to uint8
     model.set_tensor_datatype(in_name, DataType["UINT8"])
+
     return model
 
 
-custom_build_steps = [custom_step_add_preproc] + default_build_dataflow_steps
+# Insert TopK node to get predicted Top-1 class
+def custom_step_add_postproc(model, cfg):
+    model = model.transform(InsertTopK(k=1))
+    return model
+
+
+custom_build_steps = (
+    [custom_step_add_preproc] + [custom_step_add_postproc] + default_build_dataflow_steps
+)
 
 
 # determine which shell flow to use for a given platform
 def platform_to_shell(platform):
     if platform in zynq_platforms:
         return build_cfg.ShellFlowType.VIVADO_ZYNQ
+    elif platform in alveo_platforms:
+        return build_cfg.ShellFlowType.VITIS_ALVEO
     else:
         raise Exception("Unknown platform, can't determine ShellFlowType")
 
@@ -82,45 +98,64 @@ def platform_to_shell(platform):
 for platform_name in platforms_to_build:
     shell_flow_type = platform_to_shell(platform_name)
-    vitis_platform = None
-    # for Zynq, use the board name as the release name
-    # e.g. ZCU104
-    release_platform_name = platform_name
+    if shell_flow_type == build_cfg.ShellFlowType.VITIS_ALVEO:
+        vitis_platform = alveo_default_platform[platform_name]
+        # for Alveo, use the Vitis platform name as the release name
+        # e.g. xilinx_u250_xdma_201830_2
+        release_platform_name = vitis_platform
+    else:
+        vitis_platform = None
+        # for Zynq, use the board name as the release name
+        # e.g. ZCU104
+        release_platform_name = platform_name
     platform_dir = "release/%s" % release_platform_name
     os.makedirs(platform_dir, exist_ok=True)
-    for model_name in models:
-        # set up the build configuration for this model
-        cfg = build_cfg.DataflowBuildConfig(
-            output_dir="output_%s_%s" % (model_name, release_platform_name),
-            target_fps=3000,
-            synth_clk_period_ns=10.0,
-            board=platform_name,
-            steps=custom_build_steps,
-            folding_config_file="folding_config/gtsrb_folding_config.json",
-            shell_flow_type=shell_flow_type,
-            vitis_platform=vitis_platform,
-            generate_outputs=[
-                build_cfg.DataflowOutputType.ESTIMATE_REPORTS,
-                build_cfg.DataflowOutputType.STITCHED_IP,
-                build_cfg.DataflowOutputType.RTLSIM_PERFORMANCE,
-                build_cfg.DataflowOutputType.BITFILE,
-                build_cfg.DataflowOutputType.DEPLOYMENT_PACKAGE,
-                build_cfg.DataflowOutputType.PYNQ_DRIVER,
-            ],
-            specialize_layers_config_file="specialize_layers_config/gtsrb_specialize_layers.json",
+    # set up the build configuration for this model
+    cfg = build_cfg.DataflowBuildConfig(
+        output_dir="output_%s_%s" % (model_name, release_platform_name),
+        target_fps=3000,
+        synth_clk_period_ns=10.0,
+        board=platform_name,
+        steps=custom_build_steps,
+        folding_config_file="folding_config/gtsrb_folding_config.json",
+        specialize_layers_config_file="specialize_layers_config/gtsrb_specialize_layers.json",
+        shell_flow_type=shell_flow_type,
+        vitis_platform=vitis_platform,
+        generate_outputs=[
+            build_cfg.DataflowOutputType.ESTIMATE_REPORTS,
+            build_cfg.DataflowOutputType.STITCHED_IP,
+            build_cfg.DataflowOutputType.RTLSIM_PERFORMANCE,
+            build_cfg.DataflowOutputType.BITFILE,
+            build_cfg.DataflowOutputType.DEPLOYMENT_PACKAGE,
+            build_cfg.DataflowOutputType.PYNQ_DRIVER,
+        ],
+    )
+    # launch FINN compiler to build
+    if verif_en == "1":
+        # Build the model with verification
+        import sys
+
+        sys.path.append(os.path.abspath(os.getenv("FINN_EXAMPLES_ROOT") + "/ci/"))
+        from verification_funcs import init_verif, verify_build_output
+
+        cfg.verify_steps, cfg.verify_input_npy, cfg.verify_expected_output_npy = init_verif(
+            model_name
         )
-        model_file = "models/%s.onnx" % model_name
-        # launch FINN compiler to build
         build.build_dataflow_cfg(model_file, cfg)
-        # copy bitfiles into release dir if found
-        bitfile_gen_dir = cfg.output_dir + "/bitfile"
-        files_to_check_and_copy = [
-            "finn-accel.bit",
-            "finn-accel.hwh",
-            "finn-accel.xclbin",
-        ]
-        for f in files_to_check_and_copy:
-            src_file = bitfile_gen_dir + "/" + f
-            dst_file = platform_dir + "/" + f.replace("finn-accel", model_name)
-            if os.path.isfile(src_file):
-                shutil.copy(src_file, dst_file)
+        verify_build_output(cfg, model_name)
+    else:
+        # Build the model without verification
+        build.build_dataflow_cfg(model_file, cfg)
+
+    # copy bitfiles into release dir if found
+    bitfile_gen_dir = cfg.output_dir + "/bitfile"
+    files_to_check_and_copy = [
+        "finn-accel.bit",
+        "finn-accel.hwh",
+        "finn-accel.xclbin",
+    ]
+    for f in files_to_check_and_copy:
+        src_file = bitfile_gen_dir + "/" + f
+        dst_file = platform_dir + "/" + f.replace("finn-accel", model_name)
+        if os.path.isfile(src_file):
+            shutil.copy(src_file, dst_file)
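For context on the new gtsrb post-processing step: InsertTopK(k=1) appends a TopK node after the network output, so the accelerator returns the Top-1 class index rather than raw scores. A minimal sketch of the transform in isolation, assuming an arbitrary classifier exported to (Q)ONNX as "model.onnx":

from qonnx.core.modelwrapper import ModelWrapper
from qonnx.transformation.insert_topk import InsertTopK

model = ModelWrapper("model.onnx")
# Appends a TopK(k=1) node at the graph output, so the output tensor becomes
# the index of the highest-scoring class instead of the raw class scores
model = model.transform(InsertTopK(k=1))
model.save("model_topk.onnx")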
diff --git a/build/kws/build.py b/build/kws/build.py
index b73cee9..4ea2f6d 100644
--- a/build/kws/build.py
+++ b/build/kws/build.py
@@ -41,6 +41,11 @@
 import os
 import shutil
 
+model_name = "MLP_W3A3_python_speech_features_pre-processing_QONNX_opset-11"
+model_file = "models/" + model_name + ".onnx"
+
+verif_en = os.getenv("VERIFICATION_EN", "0")
+
 # Inject the preprocessing step into FINN to enable json serialization later on
 def step_preprocess(model: ModelWrapper, cfg: DataflowBuildConfig):
@@ -60,21 +65,6 @@ def step_preprocess(model: ModelWrapper, cfg: DataflowBuildConfig):
     build_cfg.DataflowOutputType.BITFILE,
     build_cfg.DataflowOutputType.DEPLOYMENT_PACKAGE,
 ]
-verification_steps = [
-    build_cfg.VerificationStepType.QONNX_TO_FINN_PYTHON,
-    build_cfg.VerificationStepType.TIDY_UP_PYTHON,
-    build_cfg.VerificationStepType.STREAMLINED_PYTHON,
-    build_cfg.VerificationStepType.FOLDED_HLS_CPPSIM,
-]
-
-model_name = "MLP_W3A3_python_speech_features_pre-processing_QONNX_opset-11"
-model_file = "models/" + model_name + ".onnx"
-
-# Change the ONNX opset from version 9 to 11, which adds support for the TopK node
-model = ModelWrapper(model_file)
-model.model.opset_import[0].version = 11
-model_file = model_file.replace(".onnx", "_opset-11.onnx")
-model.save(model_file)
 
 
 # create a release dir, used for finn-examples release packaging
@@ -88,8 +78,6 @@ def step_preprocess(model: ModelWrapper, cfg: DataflowBuildConfig):
     last_output_dir = "output_%s_%s" % (model_name, release_platform_name)
     # Configure build
     cfg = build_cfg.DataflowBuildConfig(
-        # steps=estimate_steps, generate_outputs=estimate_outputs,
-        verify_steps=verification_steps,
         steps=build_steps,
         generate_outputs=build_outputs,
         output_dir=last_output_dir,
@@ -98,11 +86,24 @@ def step_preprocess(model: ModelWrapper, cfg: DataflowBuildConfig):
         board=platform_name,
         shell_flow_type=build_cfg.ShellFlowType.VIVADO_ZYNQ,
         stitched_ip_gen_dcp=True,
-        verify_save_full_context=True,
         specialize_layers_config_file="specialize_layers_config/kws_specialize_layers.json",
     )
-    # Build the model
-    build.build_dataflow_cfg(model_file, cfg)
+
+    if verif_en == "1":
+        # Build the model with verification
+        import sys
+
+        sys.path.append(os.path.abspath(os.getenv("FINN_EXAMPLES_ROOT") + "/ci/"))
+        from verification_funcs import init_verif, verify_build_output
+
+        cfg.verify_steps, cfg.verify_input_npy, cfg.verify_expected_output_npy = init_verif(
+            model_name
+        )
+        build.build_dataflow_cfg(model_file, cfg)
+        verify_build_output(cfg, model_name)
+    else:
+        # Build the model without verification
+        build.build_dataflow_cfg(model_file, cfg)
 
     # copy bitfiles and runtime weights into release dir if found
     bitfile_gen_dir = cfg.output_dir + "/bitfile"
@@ -117,7 +118,6 @@ def step_preprocess(model: ModelWrapper, cfg: DataflowBuildConfig):
         if os.path.isfile(src_file):
             shutil.copy(src_file, dst_file)
 
-    # Export quantized inputs
     print("Quantizing validation dataset.")
     parent_model = ModelWrapper(last_output_dir + "/intermediate_models/dataflow_parent.onnx")
diff --git a/build/kws/expected_output.npy b/build/kws/expected_output.npy
deleted file mode 100644
index 0058afe..0000000
Binary files a/build/kws/expected_output.npy and /dev/null differ
diff --git a/build/kws/input.npy b/build/kws/input.npy
deleted file mode 100644
index d257362..0000000
Binary files a/build/kws/input.npy and /dev/null differ
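The kws script no longer bumps the ONNX opset at build time; the checked-in model is now expected to already be at opset 11 (the removed comment notes that the higher opset adds TopK support). A sketch of the equivalent one-off conversion, run offline under that assumption:

from qonnx.core.modelwrapper import ModelWrapper

# Offline equivalent of the removed build-time preamble (the input file name
# here is an assumption; the shipped model already carries the opset-11 suffix)
model = ModelWrapper("models/MLP_W3A3_python_speech_features_pre-processing_QONNX_opset-11.onnx")
# Opset 11 adds support for the TopK node used later in the build
model.model.opset_import[0].version = 11
model.save("models/MLP_W3A3_python_speech_features_pre-processing_QONNX_opset-11.onnx")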
diff --git a/build/mobilenet-v1/build.py b/build/mobilenet-v1/build.py
index 59b655d..3d909af 100644
--- a/build/mobilenet-v1/build.py
+++ b/build/mobilenet-v1/build.py
@@ -43,6 +43,10 @@
 )
 
 model_name = "mobilenetv1-w4a4"
+model_file = "models/%s_pre_post_tidy_opset-11.onnx" % model_name
+
+verif_en = os.getenv("VERIFICATION_EN", "0")
+
 # which platforms to build the networks for
 zynq_platforms = ["ZCU104", "ZCU102"]
@@ -101,6 +105,7 @@ def select_build_steps(platform):
     "step_hw_codegen",
     "step_hw_ipgen",
     "step_set_fifo_depths",
+    "step_create_stitched_ip",
     step_mobilenet_slr_floorplan,
     "step_synthesize_bitfile",
     "step_make_pynq_driver",
@@ -111,7 +116,6 @@ def select_build_steps(platform):
 # create a release dir, used for finn-examples release packaging
 os.makedirs("release", exist_ok=True)
 
-
 for platform_name in platforms_to_build:
     shell_flow_type = platform_to_shell(platform_name)
     if shell_flow_type == build_cfg.ShellFlowType.VITIS_ALVEO:
@@ -144,12 +148,28 @@ def select_build_steps(platform):
             build_cfg.DataflowOutputType.ESTIMATE_REPORTS,
             build_cfg.DataflowOutputType.BITFILE,
             build_cfg.DataflowOutputType.DEPLOYMENT_PACKAGE,
+            build_cfg.DataflowOutputType.STITCHED_IP,
         ],
         specialize_layers_config_file="specialize_layers_config/%s_specialize_layers.json"
         % platform_name,
     )
-    model_file = "models/%s_pre_post_tidy_opset-11.onnx" % model_name
-    build.build_dataflow_cfg(model_file, cfg)
+    if verif_en == "1":
+        # Build the model with verification
+        import sys
+
+        sys.path.append(os.path.abspath(os.getenv("FINN_EXAMPLES_ROOT") + "/ci/"))
+        from verification_funcs import init_verif, verify_build_output
+
+        cfg.verify_steps, cfg.verify_input_npy, cfg.verify_expected_output_npy = init_verif(
+            model_name
+        )
+        if "stitched_ip_rtlsim" in cfg.verify_steps:
+            cfg.verify_steps.remove("stitched_ip_rtlsim")
+        build.build_dataflow_cfg(model_file, cfg)
+        verify_build_output(cfg, model_name)
+    else:
+        # Build the model without verification
+        build.build_dataflow_cfg(model_file, cfg)
 
     # copy bitfiles and runtime weights into release dir if found
     bitfile_gen_dir = cfg.output_dir + "/bitfile"
diff --git a/build/mobilenet-v1/custom_steps.py b/build/mobilenet-v1/custom_steps.py
index 6cd54af..618809c 100644
--- a/build/mobilenet-v1/custom_steps.py
+++ b/build/mobilenet-v1/custom_steps.py
@@ -29,7 +29,9 @@
 from finn.builder.build_dataflow_config import (
     DataflowBuildConfig,
     ShellFlowType,
+    VerificationStepType,
 )
+from finn.builder.build_dataflow_steps import verify_step
 from finn.transformation.streamline import Streamline
 from qonnx.transformation.double_to_single_float import DoubleToSingleFloat
 import finn.transformation.streamline.absorb as absorb
@@ -72,6 +74,10 @@ def step_mobilenet_streamline(model: ModelWrapper, cfg: DataflowBuildConfig):
     model = model.transform(GiveUniqueNodeNames())
     model = model.transform(GiveReadableTensorNames())
     model = model.transform(InferDataTypes())
+
+    if VerificationStepType.STREAMLINED_PYTHON in cfg._resolve_verification_steps():
+        verify_step(model, cfg, "streamlined_python", need_parent=False)
+
     return model
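Because mobilenet-v1 builds with a custom step list, the stock FINN steps that would normally trigger the streamlined_python check never run, so the custom step calls verify_step itself. The guard pattern generalizes to any custom build step; step_my_custom_transform below is a hypothetical name used only for illustration:

from qonnx.core.modelwrapper import ModelWrapper
from finn.builder.build_dataflow_config import DataflowBuildConfig, VerificationStepType
from finn.builder.build_dataflow_steps import verify_step


def step_my_custom_transform(model: ModelWrapper, cfg: DataflowBuildConfig):
    # ... custom transformations on `model` would go here ...

    # _resolve_verification_steps() yields the requested VerificationStepType
    # members, so the simulation only runs when that verification was asked for
    if VerificationStepType.STREAMLINED_PYTHON in cfg._resolve_verification_steps():
        verify_step(model, cfg, "streamlined_python", need_parent=False)
    return model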
diff --git a/build/resnet50/build.py b/build/resnet50/build.py
index ccde8dc..5c953e6 100644
--- a/build/resnet50/build.py
+++ b/build/resnet50/build.py
@@ -47,6 +47,18 @@
 synth_clk_period_ns = 4.0
 target_fps = 300
 
+verif_en = os.getenv("VERIFICATION_EN", "0")
+
+# which platforms to build the networks for
+zynq_platforms = []
+alveo_platforms = ["U250"]
+platforms_to_build = zynq_platforms + alveo_platforms
+
+model_file = "models/%s_exported.onnx" % model_name
+# create a release dir, used for finn-examples release packaging
+os.makedirs("release", exist_ok=True)
+
+
 resnet50_build_steps = [
     step_resnet50_tidy,
     step_resnet50_streamline,
@@ -65,15 +77,6 @@
     "step_deployment_package",
 ]
 
-# which platforms to build the networks for
-zynq_platforms = []
-alveo_platforms = ["U250"]
-platforms_to_build = zynq_platforms + alveo_platforms
-
-model_file = "models/%s_exported.onnx" % model_name
-# create a release dir, used for finn-examples release packaging
-os.makedirs("release", exist_ok=True)
-
 
 # determine which shell flow to use for a given platform
 def platform_to_shell(platform):
@@ -123,7 +126,21 @@ def platform_to_shell(platform):
             build_cfg.DataflowOutputType.DEPLOYMENT_PACKAGE,
         ],
     )
-    build.build_dataflow_cfg(model_file, cfg)
+    if verif_en == "1":
+        # Build the model with verification
+        import sys
+
+        sys.path.append(os.path.abspath(os.getenv("FINN_EXAMPLES_ROOT") + "/ci/"))
+        from verification_funcs import init_verif, verify_build_output
+
+        cfg.verify_steps, cfg.verify_input_npy, cfg.verify_expected_output_npy = init_verif(
+            model_name
+        )
+        build.build_dataflow_cfg(model_file, cfg)
+        verify_build_output(cfg, model_name)
+    else:
+        # Build the model without verification
+        build.build_dataflow_cfg(model_file, cfg)
 
     # copy bitfiles and runtime weights into release dir if found
     bitfile_gen_dir = cfg.output_dir + "/bitfile"
diff --git a/build/resnet50/custom_steps.py b/build/resnet50/custom_steps.py
index 6679be6..f0f3f93 100644
--- a/build/resnet50/custom_steps.py
+++ b/build/resnet50/custom_steps.py
@@ -90,9 +90,11 @@
 import finn.transformation.fpgadataflow.convert_to_hw_layers as to_hw
 from qonnx.transformation.lower_convs_to_matmul import LowerConvsToMatMul
 
+from finn.builder.build_dataflow_steps import verify_step
 from finn.builder.build_dataflow_config import (
     DataflowBuildConfig,
     ShellFlowType,
+    VerificationStepType,
 )
 
 from finn.transformation.move_reshape import RemoveCNVtoFCFlatten
@@ -111,6 +113,9 @@ def step_resnet50_tidy(model: ModelWrapper, cfg: DataflowBuildConfig):
     model = model.transform(GiveUniqueNodeNames())
     model = model.transform(GiveReadableTensorNames())
     model = model.transform(InferDataTypes())
+
+    if VerificationStepType.TIDY_UP_PYTHON in cfg._resolve_verification_steps():
+        verify_step(model, cfg, "initial_python", need_parent=False)
     return model
 
@@ -169,6 +174,8 @@ def step_resnet50_streamline(model: ModelWrapper, cfg: DataflowBuildConfig):
     model = model.transform(SortGraph())
     model = model.transform(DoubleToSingleFloat())
 
+    if VerificationStepType.STREAMLINED_PYTHON in cfg._resolve_verification_steps():
+        verify_step(model, cfg, "streamlined_python", need_parent=False)
     return model
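resnet50's custom steps follow the same pattern, pairing VerificationStepType.TIDY_UP_PYTHON with the string "initial_python". The string passed to verify_step is the enum member's value, which is also why ci/verification_funcs.py further down can express the verification steps as plain strings. A quick sketch to see the mapping for whatever FINN version is installed:

from finn.builder.build_dataflow_config import VerificationStepType

# Print each verification step's enum name next to the string value that
# verify_step and cfg.verify_steps use (e.g. TIDY_UP_PYTHON -> initial_python)
for step in VerificationStepType:
    print(step.name, "->", step.value)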
diff --git a/build/vgg10-radioml/README.md b/build/vgg10-radioml/README.md
index 18df19d..36cdf95 100755
--- a/build/vgg10-radioml/README.md
+++ b/build/vgg10-radioml/README.md
@@ -21,7 +21,7 @@
 FINN_EXAMPLES=/path/to/finn-examples
 # cd into finn submodule
 cd $FINN_EXAMPLES/build/finn
 # launch the build on the vgg10 folder
-./run-docker.sh build_custom $FINN_EXAMPLES/build/vgg10
+./run-docker.sh build_custom $FINN_EXAMPLES/build/vgg10-radioml
 ```
 
 3. The generated outputs will be under `vgg10-radioml/output_<topology>_<platform>`. You can find a description of the generated files [here](https://finn-dev.readthedocs.io/en/latest/command_line.html#simple-dataflow-build-mode).
diff --git a/build/vgg10-radioml/build.py b/build/vgg10-radioml/build.py
index edc93fd..38d0992 100755
--- a/build/vgg10-radioml/build.py
+++ b/build/vgg10-radioml/build.py
@@ -36,6 +36,9 @@
 from custom_steps import step_pre_streamline, step_convert_final_layers
 
 model_name = "radioml_w4a4_small_tidy"
+model_file = "models/%s.onnx" % model_name
+
+verif_en = os.getenv("VERIFICATION_EN", "0")
 
 # which platforms to build the networks for
 zynq_platforms = ["ZCU104"]
@@ -119,14 +122,29 @@ def select_build_steps(platform):
         generate_outputs=[
             build_cfg.DataflowOutputType.ESTIMATE_REPORTS,
             build_cfg.DataflowOutputType.STITCHED_IP,
-            # build_cfg.DataflowOutputType.RTLSIM_PERFORMANCE,
+            build_cfg.DataflowOutputType.RTLSIM_PERFORMANCE,
             build_cfg.DataflowOutputType.BITFILE,
             build_cfg.DataflowOutputType.DEPLOYMENT_PACKAGE,
             build_cfg.DataflowOutputType.PYNQ_DRIVER,
         ],
     )
-    model_file = "models/%s.onnx" % model_name
-    build.build_dataflow_cfg(model_file, cfg)
+    if verif_en == "1":
+        # Build the model with verification
+        import sys
+
+        sys.path.append(os.path.abspath(os.getenv("FINN_EXAMPLES_ROOT") + "/ci/"))
+        from verification_funcs import init_verif, verify_build_output
+
+        cfg.verify_steps, cfg.verify_input_npy, cfg.verify_expected_output_npy = init_verif(
+            model_name
+        )
+        if "folded_hls_cppsim" in cfg.verify_steps:
+            cfg.verify_steps.remove("folded_hls_cppsim")
+        build.build_dataflow_cfg(model_file, cfg)
+        verify_build_output(cfg, model_name)
+    else:
+        # Build the model without verification
+        build.build_dataflow_cfg(model_file, cfg)
 
     # copy bitfiles and runtime weights into release dir if found
     bitfile_gen_dir = cfg.output_dir + "/bitfile"
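Note the per-network pruning of the shared verification step list: vgg10-radioml drops "folded_hls_cppsim" and mobilenet-v1 drops "stitched_ip_rtlsim" before launching the build. The patch does not say why; presumably those simulations are impractical or unsupported for these particular networks. A standalone sketch of the pattern, guarding with `in` so the removal stays safe if the shared list in ci/verification_funcs.py changes later:

verify_steps = [
    "finn_onnx_python",
    "initial_python",
    "streamlined_python",
    "folded_hls_cppsim",
    "node_by_node_rtlsim",
    "stitched_ip_rtlsim",
]
skip = "folded_hls_cppsim"  # vgg10-radioml; mobilenet-v1 skips "stitched_ip_rtlsim"
if skip in verify_steps:
    verify_steps.remove(skip)
print(verify_steps)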
diff --git a/ci/verification_funcs.py b/ci/verification_funcs.py
new file mode 100644
index 0000000..ad0896e
--- /dev/null
+++ b/ci/verification_funcs.py
@@ -0,0 +1,95 @@
+import logging
+import os
+import sys
+
+
+def create_logger():
+    # Create a logger to capture output in both console and log file
+    logger = logging.getLogger("verif_logger")
+    # Let INFO records through (the default inherited level is WARNING)
+    logger.setLevel(logging.INFO)
+    out_handler = logging.StreamHandler(sys.stdout)
+    file_handler = logging.FileHandler("all_verification_output.log", mode="w")
+    out_handler.setLevel(logging.INFO)
+    file_handler.setLevel(logging.INFO)
+    out_format = logging.Formatter("%(message)s")
+    file_format = logging.Formatter("%(message)s")
+    out_handler.setFormatter(out_format)
+    file_handler.setFormatter(file_format)
+    logger.addHandler(out_handler)
+    logger.addHandler(file_handler)
+
+
+def set_verif_steps():
+    # Set verification steps
+    verif_steps = [
+        "finn_onnx_python",
+        "initial_python",
+        "streamlined_python",
+        "folded_hls_cppsim",
+        "node_by_node_rtlsim",
+        "stitched_ip_rtlsim",
+    ]
+    return verif_steps
+
+
+def set_verif_io(model_name):
+    io_folder = os.getenv("VERIFICATION_IO")
+    # Set the paths of input/expected output files for verification,
+    # using the model name
+    if "tfc-w" in model_name:
+        # All mnist and cifar10 models use the same i/o
+        verif_input = "%s/tfc_mnist_input.npy" % io_folder
+        verif_output = "%s/tfc_mnist_output.npy" % io_folder
+    elif "cnv-w" in model_name:
+        verif_input = "%s/cnv_cifar10_input.npy" % io_folder
+        verif_output = "%s/cnv_cifar10_output.npy" % io_folder
+    else:
+        verif_input = "%s/%s_input.npy" % (io_folder, model_name)
+        verif_output = "%s/%s_output.npy" % (io_folder, model_name)
+    return verif_input, verif_output
+
+
+def init_verif(model_name):
+    if not logging.getLogger("verif_logger").hasHandlers():
+        create_logger()
+    verif_steps = set_verif_steps()
+    verif_input, verif_output = set_verif_io(model_name)
+
+    return verif_steps, verif_input, verif_output
+
+
+def verify_build_output(cfg, model_name):
+    logger = logging.getLogger("verif_logger")
+    verif_output_dir = cfg.output_dir + "/verification_output"
+    if os.path.isdir(verif_output_dir) is False:
+        logger.info(
+            "Verification is enabled, "
+            "but verification output for %s on %s has not been generated. "
+            "Please run full build with verification enabled.\n" % (model_name, cfg.board)
+        )
+        return
+    logger.info("\n*****************************************************")
+    logger.info("Verification Results for %s on %s" % (model_name, cfg.board))
+    logger.info("*****************************************************")
+
+    # Using output verification files, print whether verification was
+    # success or failure, by iterating through the step names and
+    # the output file names and comparing them
+    out_files = os.listdir(verif_output_dir)
+    for step_name in cfg.verify_steps:
+        for file_name in out_files:
+            if step_name in file_name:
+                # Output file will always end in _SUCCESS.npy or _FAIL.npy
+                # (or .npz if verify_save_full_context is enabled),
+                # so check the last few characters of the filename
+                # to see if it is SUCCESS or FAIL
+                if file_name[-8:-4] == "FAIL":
+                    logger.info("Verification for step %-22s: FAIL" % step_name)
+                elif file_name[-11:-4] == "SUCCESS":
+                    logger.info("Verification for step %-22s: SUCCESS" % step_name)
+                break
+        else:
+            # File for the step was not found, so assume the step was skipped
+            logger.info("Verification for step %-22s: IO FILE NOT FOUND - SKIPPED" % step_name)
+    logger.info(" ")
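How a build script consumes the new module, assuming ci/ is on sys.path and VERIFICATION_IO points at a directory of golden .npy files (both paths below are placeholders):

import os
import sys

# Placeholder paths; VERIFICATION_IO must hold <model_name>_input.npy and
# <model_name>_output.npy, or the shared tfc_mnist_* / cnv_cifar10_* files
# for the bnn-pynq models
os.environ["VERIFICATION_IO"] = "/path/to/verification_io"
sys.path.append("/path/to/finn-examples/ci")

from verification_funcs import init_verif

steps, verif_in, verif_out = init_verif("cnv-w1a1")
print(steps)      # the six step names from set_verif_steps()
print(verif_in)   # .../cnv_cifar10_input.npy (shared by all cnv-w* models)
print(verif_out)  # .../cnv_cifar10_output.npy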
diff --git a/tests/test_jupyter_notebooks.py b/tests/test_jupyter_notebooks.py
index e0e86c0..3058e68 100644
--- a/tests/test_jupyter_notebooks.py
+++ b/tests/test_jupyter_notebooks.py
@@ -1,98 +1,291 @@
 import pytest
+import json
 import nbformat
+import os
+import re
 from nbconvert.preprocessors import ExecutePreprocessor
 
 notebook_timeout_seconds = 3600
 notebook_dir = "./finn_examples/notebooks/"
 
+# Needed for iterating through each mnist and cifar10 model
+# within their respective notebooks, formatted as:
+# models = [
+#     (model_name, expected_accuracy),
+#     ...
+# ]
+mnist_models = [
+    ("tfc_w1a1_mnist", 92.96),
+    ("tfc_w1a2_mnist", 94.74),
+    ("tfc_w2a2_mnist", 96.6),
+]
+
+cifar10_models = [
+    ("cnv_w1a1_cifar10", 84.19),
+    ("cnv_w1a2_cifar10", 87.76),
+    ("cnv_w2a2_cifar10", 88.63),
+]
+
+# List of all notebooks to be tested, formatted as:
+# all_notebooks = [
+#     (
+#         notebook_name,
+#         model_name,
+#         expected_returned_label,
+#         expected_accuracy,
+#     )
+#     ...
+# ]
+all_notebooks = [
+    (
+        # model name and accuracy will be assigned using mnist_models during test
+        notebook_dir + "/0_mnist_with_fc_networks.ipynb",
+        " ",
+        "Returned class is 7",
+        " ",
+    ),
+    (
+        # model name and accuracy will be assigned using cifar10_models during test
+        notebook_dir + "/1_cifar10_with_cnv_networks.ipynb",
+        " ",
+        "Returned class is 3",
+        " ",
+    ),
+    (
+        notebook_dir + "/2_imagenet_with_cnns.ipynb",
+        "mobilenetv1_w4a4_imagenet",
+        " ",
+        70.406,
+    ),
+    (
+        notebook_dir + "/3_binarycop_mask_detection.ipynb",
+        "bincop_cnv",
+        " ",
+        " ",
+    ),
+    (
+        notebook_dir + "/4_keyword_spotting.ipynb",
+        "kws_mlp",
+        "The audio file was classified as: yes",
+        88.7646,
+    ),
+    (
+        notebook_dir + "/5_radioml_with_cnns.ipynb",
+        "vgg_w4a4_radioml",
+        "Top-1 class predicted by the accelerator: 16QAM",
+        87.886,
+    ),
+    (
+        notebook_dir + "/6_cybersecurity_with_mlp.ipynb",
+        "mlp_w2a2_unsw_nb15",
+        "Returned label is: 0 (normal data)",
+        91.90,
+    ),
+    (
+        notebook_dir + "/7_traffic_sign_recognition_gtsrb.ipynb",
+        "cnv_w1a1_gtsrb",
+        "Accelerator result is:\nProhibited for vehicles with a "
+        "permitted gross weight over 3.5t including their trailers, "
+        "and for tractors except passenger cars and buses",
+        94.9485,
+    ),
+]
+
+# List of notebooks for each platform
 pynq_notebooks = [
+    # 0_mnist_with_fc_networks.ipynb
     pytest.param(
-        notebook_dir + "0_mnist_with_fc_networks.ipynb",
+        all_notebooks[0][0],
+        all_notebooks[0][1],
+        all_notebooks[0][2],
+        all_notebooks[0][3],
         marks=pytest.mark.pynq_notebooks,
     ),
+    # 1_cifar10_with_cnv_networks.ipynb
     pytest.param(
-        notebook_dir + "1_cifar10_with_cnv_networks.ipynb",
+        all_notebooks[1][0],
+        all_notebooks[1][1],
+        all_notebooks[1][2],
+        all_notebooks[1][3],
         marks=pytest.mark.pynq_notebooks,
     ),
+    # 3_binarycop_mask_detection.ipynb
     pytest.param(
-        notebook_dir + "3_binarycop_mask_detection.ipynb",
+        all_notebooks[3][0],
+        all_notebooks[3][1],
+        all_notebooks[3][2],
+        all_notebooks[3][3],
         marks=[pytest.mark.pynq_notebooks, pytest.mark.xfail],
     ),
+    # 4_keyword_spotting.ipynb
+    pytest.param(
+        all_notebooks[4][0],
+        all_notebooks[4][1],
+        all_notebooks[4][2],
+        all_notebooks[4][3],
+        marks=pytest.mark.pynq_notebooks,
+    ),
+    # 6_cybersecurity_with_mlp.ipynb
     pytest.param(
-        notebook_dir + "4_keyword_spotting.ipynb",
+        all_notebooks[6][0],
+        all_notebooks[6][1],
+        all_notebooks[6][2],
+        all_notebooks[6][3],
         marks=pytest.mark.pynq_notebooks,
     ),
+    # 7_traffic_sign_recognition_gtsrb.ipynb
     pytest.param(
-        notebook_dir + "6_cybersecurity_with_mlp.ipynb",
+        all_notebooks[7][0],
+        all_notebooks[7][1],
+        all_notebooks[7][2],
+        all_notebooks[7][3],
         marks=pytest.mark.pynq_notebooks,
     ),
 ]
 
 zcu_notebooks = [
+    # 0_mnist_with_fc_networks.ipynb
     pytest.param(
-        notebook_dir + "0_mnist_with_fc_networks.ipynb",
+        all_notebooks[0][0],
+        all_notebooks[0][1],
+        all_notebooks[0][2],
+        all_notebooks[0][3],
         marks=pytest.mark.zcu_notebooks,
     ),
+    # 1_cifar10_with_cnv_networks.ipynb
     pytest.param(
-        notebook_dir + "1_cifar10_with_cnv_networks.ipynb",
+        all_notebooks[1][0],
+        all_notebooks[1][1],
+        all_notebooks[1][2],
+        all_notebooks[1][3],
         marks=pytest.mark.zcu_notebooks,
     ),
+    # 5_radioml_with_cnns.ipynb
     pytest.param(
-        notebook_dir + "5_radioml_with_cnns.ipynb",
+        all_notebooks[5][0],
+        all_notebooks[5][1],
+        all_notebooks[5][2],
+        all_notebooks[5][3],
         marks=pytest.mark.zcu_notebooks,
     ),
+    # 6_cybersecurity_with_mlp.ipynb
     pytest.param(
-        notebook_dir + "6_cybersecurity_with_mlp.ipynb",
+        all_notebooks[6][0],
+        all_notebooks[6][1],
+        all_notebooks[6][2],
+        all_notebooks[6][3],
         marks=pytest.mark.zcu_notebooks,
     ),
 ]
 
 ultra96_notebooks = [
+    # 0_mnist_with_fc_networks.ipynb
     pytest.param(
-        notebook_dir + "0_mnist_with_fc_networks.ipynb",
+        all_notebooks[0][0],
+        all_notebooks[0][1],
+        all_notebooks[0][2],
+        all_notebooks[0][3],
         marks=pytest.mark.ultra96_notebooks,
     ),
+    # 1_cifar10_with_cnv_networks.ipynb
     pytest.param(
-        notebook_dir + "1_cifar10_with_cnv_networks.ipynb",
+        all_notebooks[1][0],
+        all_notebooks[1][1],
+        all_notebooks[1][2],
+        all_notebooks[1][3],
         marks=pytest.mark.ultra96_notebooks,
     ),
+    # 6_cybersecurity_with_mlp.ipynb
     pytest.param(
-        notebook_dir + "6_cybersecurity_with_mlp.ipynb",
+        all_notebooks[6][0],
+        all_notebooks[6][1],
+        all_notebooks[6][2],
+        all_notebooks[6][3],
         marks=pytest.mark.ultra96_notebooks,
     ),
 ]
 
 alveo_notebooks = [
+    # 0_mnist_with_fc_networks.ipynb
     pytest.param(
-        notebook_dir + "0_mnist_with_fc_networks.ipynb",
+        all_notebooks[0][0],
+        all_notebooks[0][1],
+        all_notebooks[0][2],
+        all_notebooks[0][3],
         marks=pytest.mark.alveo_notebooks,
     ),
+    # 1_cifar10_with_cnv_networks.ipynb
     pytest.param(
-        notebook_dir + "1_cifar10_with_cnv_networks.ipynb",
+        all_notebooks[1][0],
+        all_notebooks[1][1],
+        all_notebooks[1][2],
+        all_notebooks[1][3],
         marks=pytest.mark.alveo_notebooks,
     ),
+    # 2_imagenet_with_cnns.ipynb
     pytest.param(
-        notebook_dir + "2_imagenet_with_cnns.ipynb",
-        marks=pytest.mark.alveo_notebooks,
+        all_notebooks[2][0],
+        all_notebooks[2][1],
+        all_notebooks[2][2],
+        all_notebooks[2][3],
+        marks=[pytest.mark.alveo_notebooks, pytest.mark.xfail],
     ),
 ]
 
 
-@pytest.mark.parametrize(
-    "notebook", pynq_notebooks + zcu_notebooks + ultra96_notebooks + alveo_notebooks
-)
-def test_notebook_exec(notebook):
+def get_notebook_exec_result(notebook, model_name, exp_label, exp_acc):
+    # Read and execute the notebook
     with open(notebook) as f:
         nb = nbformat.read(f, as_version=4)
-        ep = ExecutePreprocessor(timeout=notebook_timeout_seconds, kernel_name="python3")
+    ep = ExecutePreprocessor(timeout=notebook_timeout_seconds, kernel_name="python3")
+    ep.preprocess(nb)
 
-    # debug only for now...
-    notebook_dump = notebook.replace(".ipynb", ".dump")
-    with open(notebook_dump, "w") as f:
-        f.write(str(ep.preprocess(nb)))
+    # Read in the executed notebook as a json
+    exec_notebook = notebook.replace(".ipynb", "_exec.ipynb")
+    with open(exec_notebook, "w", encoding="utf-8") as f:
+        nbformat.write(nb, f)
+    with open(exec_notebook) as f:
+        test_json_exec = json.load(f)
+
+    # For checking if the correct class was predicted
+    res = False
+
+    # Get outputs of notebook json
+    for cell in test_json_exec["cells"]:
+        if "outputs" in cell:
+            for output in cell["outputs"]:
+                if "text" in output:
+                    out_text = "".join(output["text"])
+                    if exp_label in out_text:
+                        # Expected class was predicted
+                        res = True
+                    if "accuracy" in out_text.casefold():
+                        # Parse the accuracy value and check if it is as expected
+                        nb_acc = float(re.findall("\\d+\\.\\d+", out_text)[-1])
+                        assert nb_acc >= exp_acc, f"Accuracy test for {model_name} FAILED"
+
+    assert res is True, f"Classification test for {model_name} FAILED"
+
+
+@pytest.mark.parametrize(
+    "notebook,model_name,exp_label,exp_acc",
+    pynq_notebooks + zcu_notebooks + ultra96_notebooks + alveo_notebooks,
+)
+def test_notebook_exec(notebook, model_name, exp_label, exp_acc):
+    if "mnist" in notebook:
+        for mnist_model_name, mnist_exp_acc in mnist_models:
+            os.system("sed -i '27s/.*/\"accel = models.%s()\"/' %s" % (mnist_model_name, notebook))
+            get_notebook_exec_result(notebook, mnist_model_name, exp_label, mnist_exp_acc)
+    elif "cifar10" in notebook:
+        for cifar10_model_name, cifar10_exp_acc in cifar10_models:
+            os.system(
+                "sed -i '26s/.*/\"accel = models.%s()\"/' %s" % (cifar10_model_name, notebook)
+            )
+            get_notebook_exec_result(notebook, cifar10_model_name, exp_label, cifar10_exp_acc)
+    else:
+        get_notebook_exec_result(notebook, model_name, exp_label, exp_acc)
 
-    try:
-        assert ep.preprocess(nb) is not None, f"Got empty notebook for {notebook}"
-    except Exception:
-        assert False, f"Failed executing {notebook}"
+    os.system("rm -rf %s/*_exec*" % notebook_dir)
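One fragility worth noting in the new test: the sed calls rewrite hard-coded cell lines 27 and 26 of the raw .ipynb JSON, which breaks silently if the notebooks ever shift by a line. A hypothetical nbformat-based alternative (not part of this patch) that locates the model cell by content instead:

import nbformat


def set_model_cell(notebook_path, model_name):
    # Rewrite the "accel = models.<name>()" cell wherever it lives, rather
    # than relying on a fixed line number in the serialized JSON
    nb = nbformat.read(notebook_path, as_version=4)
    for cell in nb.cells:
        if cell.cell_type == "code" and "accel = models." in cell.source:
            cell.source = "accel = models.%s()" % model_name
            break
    nbformat.write(nb, notebook_path)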